##// END OF EJS Templates
scmutil: introduce binnode(ctx) as paired function with intrev(ctx)...
Yuya Nishihara -
r32656:55ff67ff default
parent child Browse files
Show More
@@ -1,3586 +1,3585 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import itertools
12 12 import os
13 13 import re
14 14 import tempfile
15 15
16 16 from .i18n import _
17 17 from .node import (
18 bin,
19 18 hex,
20 19 nullid,
21 20 nullrev,
22 21 short,
23 22 )
24 23
25 24 from . import (
26 25 bookmarks,
27 26 changelog,
28 27 copies,
29 28 crecord as crecordmod,
30 29 encoding,
31 30 error,
32 31 formatter,
33 32 graphmod,
34 33 lock as lockmod,
35 34 match as matchmod,
36 35 obsolete,
37 36 patch,
38 37 pathutil,
39 38 phases,
40 39 pycompat,
41 40 registrar,
42 41 repair,
43 42 revlog,
44 43 revset,
45 44 scmutil,
46 45 smartset,
47 46 templatekw,
48 47 templater,
49 48 util,
50 49 vfs as vfsmod,
51 50 )
# convenient local alias for util's StringIO/BytesIO implementation
stringio = util.stringio

# templates of common command options
# (each entry: (shortname, longname, default, helptext[, metavar]))

dryrunopts = [
    ('n', 'dry-run', None,
     _('do not perform actions, just print output')),
]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"
164 163
def ishunk(x):
    """Return True if *x* is a record/crecord hunk object."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
168 167
def newandmodified(chunks, originalchunks):
    """Return the set of filenames whose hunks in *chunks* introduce a new
    file and were not part of *originalchunks* (i.e. files both newly added
    and then modified during interactive recording)."""
    files = set()
    for c in chunks:
        if not ishunk(c):
            continue
        if c.header.isnewfile() and c not in originalchunks:
            files.add(c.header.filename())
    return files
176 175
def parsealiases(cmd):
    """Split a command table key such as "^log|history" into its aliases."""
    stripped = cmd.lstrip("^")
    return stripped.split("|")
179 178
def setupwrapcolorwrite(ui):
    """Wrap ui.write so diff output can be labeled/colorized.

    Returns the original write method so the caller can restore it later.
    """
    oldwrite = ui.write

    def wrapwrite(orig, *args, **kw):
        label = kw.pop('label', '')
        # let the patch module split the output into labeled chunks
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    def wrap(*args, **kwargs):
        return wrapwrite(oldwrite, *args, **kwargs)

    setattr(ui, 'write', wrap)
    return oldwrite
192 191
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Filter *originalhunks*, using the curses UI when *usecurses* is set.

    *testfile* selects the scripted chunk selector used by the test suite.
    """
    if not usecurses:
        # plain-text interactive prompt
        return patch.filterpatch(ui, originalhunks, operation)

    if testfile:
        chooser = crecordmod.testdecorator(testfile,
                                           crecordmod.testchunkselector)
    else:
        chooser = crecordmod.chunkselector
    return crecordmod.filterpatch(ui, originalhunks, chooser, operation)
205 204
def recordfilter(ui, originalhunks, operation=None):
    """Prompt the user to filter *originalhunks*; return (chunks, opts).

    *operation* is used to build ui messages to indicate to the user what
    kind of filtering they are doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config('experimental', 'crecordtest', None)
    # colorize diff output while the prompt is active
    oldwrite = setupwrapcolorwrite(ui)
    try:
        chunks, newopts = filterchunks(ui, originalhunks, usecurses,
                                       testfile, operation)
    finally:
        # always restore the unwrapped write method
        ui.write = oldwrite
    return chunks, newopts
222 221
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Interactively select changes and commit them via *commitfunc*.

    *cmdsuggest* is the command name suggested when the terminal is not
    interactive; *backupall* forces backing up every changed file;
    *filterfn* is the hunk-filtering UI (e.g. recordfilter).
    """
    # delayed import to avoid an import cycle (merge imports cmdutil)
    from . import merge as mergemod
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                                '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        # build a git-style diff of the working directory to chop into hunks
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # headers have no files(); skip them here
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            # serialize only the selected hunks into one patch
            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        extra={"suffix": ".diff"},
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best-effort cleanup; leftovers are harmless
                pass

    def recordinwlock(ui, repo, message, match, opts):
        # take the working-directory lock around the whole record operation
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
405 404
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for e in keys:
        aliases = parsealiases(e)
        allcmds.extend(aliases)
        if cmd in aliases:
            found = cmd
        elif not strict:
            # first alias with the typed string as a prefix, if any
            found = next((a for a in aliases if a.startswith(cmd)), None)
        else:
            found = None
        if found is not None:
            entry = (aliases, table[e])
            if aliases[0].startswith("debug") or found.startswith("debug"):
                debugchoice[found] = entry
            else:
                choice[found] = entry

    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
443 442
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    # exact match wins outright
    try:
        return choice[cmd]
    except KeyError:
        pass

    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))

    if choice:
        return choice.values()[0]

    raise error.UnknownCommand(cmd, allcmds)
459 458
def findrepo(p):
    """Walk upward from *p* looking for a directory containing '.hg'.

    Return that directory, or None once the filesystem root is reached.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # reached the root without finding a repository
            return None
        p = parent

    return p
467 466
def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """

    # a second parent means a merge is in progress
    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise error.Abort(_('uncommitted changes'), hint=hint)
    ctx = repo[None]
    # recurse into subrepositories; a dirty subrepo also aborts
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)
485 484
def logmessage(ui, opts):
    """get the log message according to -m and -l option"""
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if logfile and not message:
        try:
            if isstdiofilename(logfile):
                # '-' (or empty) means read the message from stdin
                message = ui.fin.read()
            else:
                # normalize line endings while reading the file
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, inst.strerror))
    return message
504 503
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # a changectx with two parents is a merge commit
        ismerge = 1 < len(ctxorbool.parents())
    return baseformname + (".merge" if ismerge else ".normal")
521 520
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is a extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        def forcededitor(r, c, s):
            return commitforceeditor(r, c, s,
                                     finishdesc=finishdesc,
                                     extramsg=extramsg,
                                     editform=editform)
        return forcededitor
    if editform:
        def formeditor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return formeditor
    return commiteditor
552 551
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if not limit:
        # unset or empty string: no limit
        return None
    try:
        limit = int(limit)
    except ValueError:
        raise error.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise error.Abort(_('limit must be positive'))
    return limit
566 565
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand %-escapes in output filename pattern *pat*.

    Supported escapes: %% literal percent, %b basename of repo root,
    and - when the corresponding argument is given - %H/%h/%R/%r full
    hex/short hex/rev number/zero-padded rev of *node*, %m mangled *desc*,
    %N/%n total/padded sequence number, %s/%d/%p basename/dirname/full
    *pathname*.

    Raises error.Abort on an escape that is not enabled by the arguments.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        # raw string: '\w' would be an invalid string escape otherwise
        'm': lambda: re.sub(r'[^\w]', '_', str(desc))
    }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }

    try:
        # merged the two identical 'if node:' checks into one
        if node:
            expander.update(node_expander)
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # pad the sequence number to the width of the total
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i:i + 1]
            if c == '%':
                i += 1
                c = pat[i:i + 1]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
612 611
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    if not pat:
        return True
    return pat == '-'
616 615
617 616 class _unclosablefile(object):
618 617 def __init__(self, fp):
619 618 self._fp = fp
620 619
621 620 def close(self):
622 621 pass
623 622
624 623 def __iter__(self):
625 624 return iter(self._fp)
626 625
627 626 def __getattr__(self, attr):
628 627 return getattr(self._fp, attr)
629 628
630 629 def __enter__(self):
631 630 return self
632 631
633 632 def __exit__(self, exc_type, exc_value, exc_tb):
634 633 pass
635 634
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Open the output file named by expanding *pat* (see makefilename).

    When *pat* denotes stdio ('' or '-'), return ui.fout/ui.fin wrapped so
    callers cannot close it. *modemap* maps filename -> open mode and is
    mutated so a second write to the same file appends instead of truncating.
    """

    writable = mode not in ('r', 'rb')

    if isstdiofilename(pat):
        if writable:
            fp = repo.ui.fout
        else:
            fp = repo.ui.fin
        # wrap shared stdio streams so callers cannot close them
        return _unclosablefile(fp)
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # first write truncates; later writes to the same name append
            modemap[fn] = 'ab'
    return open(fn, mode)
654 653
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    # validate mutually-exclusive option combinations first
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        # fall back to opening a raw revlog file from the filesystem
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        # swap the '.d'/'.i' suffix for the index file
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
699 698
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or move, when *rename* is True) files matching *pats*.

    The last element of *pats* is the destination. Returns True when any
    copy failed, False otherwise.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # resolve one source pattern into (abs, rel, exact) tuples,
        # warning about unmanaged or removed files given explicitly
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(wctx, [pat], opts, globbed=True)
        for abs in wctx.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # perform (or record) one copy; returns True on failure
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    if after:
                        flags = '--after --force'
                    else:
                        flags = '--force'
                    if rename:
                        hint = _('(hg rename %s to replace the file by '
                                 'recording a rename)\n') % flags
                    else:
                        hint = _('(hg copy %s to replace the file by '
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _('(hg rename --after to record the rename)\n')
                    else:
                        hint = _('(hg copy --after to record the copy)\n')
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # case-only rename: go through a temporary name
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                repo.wvfs.unlinkpath(abssrc)
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many stripped sources already exist in dest
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
945 944
## facility to let extension process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' are run before the commit is made and are provided the following
# arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' are run after the commit is made and are provided the following
# argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
966 965
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it a simple code translation from
    the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)

    Returns a ``(msg, node, rejects)`` tuple, where ``msg`` is a status
    message (or None when the patch was empty), ``node`` is the created
    changeset node (or None), and ``rejects`` is True when a --partial
    application left reject files behind.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context
    extractdata = patch.extract(ui, hunk)
    tmpname = extractdata.get('filename')
    message = extractdata.get('message')
    user = opts.get('user') or extractdata.get('user')
    date = opts.get('date') or extractdata.get('date')
    branch = extractdata.get('branch')
    nodeid = extractdata.get('nodeid')
    p1 = extractdata.get('p1')
    p2 = extractdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)
    # no filename means the hunk carried no actual patch content
    if not tmpname:
        return (None, None, False)

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        # pad to two parents so parents[1] below is always valid
        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            if not nodeid or not p1:
                raise error.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                # recorded parents unknown locally: fall back to the
                # working directory parents
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            # apply the patch to the working directory
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or importbranch:
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except patch.PatchError as e:
                if not partial:
                    raise error.Abort(str(e))
                if partial:
                    # --partial: keep going, remember there were rejects
                    rejects = True

            files = list(files)
            if nocommit:
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform, **opts)
                extra = {}
                # let extensions inject data into the changeset's extra dict
                for idfunc in extrapreimport:
                    extrapreimportmap[idfunc](repo, extractdata, extra, opts)
                overrides = {}
                if partial:
                    # a fully-rejected patch may produce an empty commit
                    overrides[('ui', 'allowemptycommit')] = True
                with repo.ui.configoverride(overrides, 'import'):
                    n = repo.commit(message, user,
                                    date, match=m,
                                    editor=editor, extra=extra)
                for idfunc in extrapostimport:
                    extrapostimportmap[idfunc](repo[n])
        else:
            # --bypass: apply the patch to an in-memory filestore instead of
            # the working directory
            if opts.get('exact') or importbranch:
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except patch.PatchError as e:
                    raise error.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.makememctx(repo, (p1.node(), p2.node()),
                                            message,
                                            user,
                                            date,
                                            branch, files, store,
                                            editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and nocommit:
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise error.Abort(_('patch is damaged or loses information'))
        msg = _('applied to working directory')
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        os.unlink(tmpname)
1134 1133
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# each function has to return a string to be added to the header or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1142 1141
def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
    """Emit one changeset in "HG changeset patch" format through write().

    ``write`` is a callable accepting the text plus optional keyword
    arguments (``label=`` is passed for diff chunks).  ``seqno`` is the
    1-based position of this patch in the export batch, handed to the
    extraexport header hooks.
    """
    curnode = ctx.node()
    ps = [p.node() for p in ctx.parents() if p]
    branchname = ctx.branch()
    if switch_parent:
        # diff against the second parent instead of the first
        ps.reverse()

    firstparent = ps[0] if ps else nullid

    write("# HG changeset patch\n")
    write("# User %s\n" % ctx.user())
    write("# Date %d %d\n" % ctx.date())
    write("# %s\n" % util.datestr(ctx.date()))
    if branchname and branchname != 'default':
        write("# Branch %s\n" % branchname)
    write("# Node ID %s\n" % hex(curnode))
    write("# Parent  %s\n" % hex(firstparent))
    if len(ps) > 1:
        write("# Parent  %s\n" % hex(ps[1]))

    # let extensions contribute extra header lines
    for extraid in extraexport:
        headerline = extraexportmap[extraid](seqno, ctx)
        if headerline is not None:
            write('# %s\n' % headerline)

    write(ctx.description().rstrip())
    write("\n\n")

    for chunk, label in patch.diffui(repo, firstparent, curnode, match,
                                     opts=diffopts):
        write(chunk, label=label)
1175 1174
def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      fntemplate: An optional string to use for generating patch file names.
      fp: An optional file-like object to which patches should be written.
      switch_parent: If True, show diffs against second parent when not nullid.
                     Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fp is specified: All revs are written to the specified
                         file-like object.
        fntemplate specified: Each rev is written to a unique file named using
                              the given template.
        Neither fp nor template specified: All revs written to repo.ui.write()
    '''

    total = len(revs)
    revwidth = max(len(str(rev)) for rev in revs)
    # per-file open mode cache shared across makefileobj calls
    filemode = {}

    # 'write' is (re)bound below depending on the destination
    write = None
    dest = '<unnamed>'
    if fp:
        dest = getattr(fp, 'name', dest)
        def write(s, **kw):
            fp.write(s)
    elif not fntemplate:
        write = repo.ui.write

    for seqno, rev in enumerate(revs, 1):
        ctx = repo[rev]
        fo = None
        if not fp and fntemplate:
            # one output file per revision, named from the template
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
                             total=total, seqno=seqno, revwidth=revwidth,
                             mode='wb', modemap=filemode)
            dest = fo.name
            def write(s, **kw):
                fo.write(s)
        # '<unnamed>'/'<stdout>'-style pseudo names are not announced
        if not dest.startswith('<'):
            repo.ui.note("%s\n" % dest)
        _exportsingle(
            repo, ctx, match, switch_parent, rev, seqno, write, opts)
        if fo is not None:
            fo.close()
1234 1233
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False):
    '''show diff or diffstat.

    Writes the diff between node1 and node2 (diffstat summary when ``stat``
    is True) to ``fp`` if given, otherwise through ``ui.write``.  ``root``
    restricts output to paths under that directory; ``listsubrepos``
    additionally recurses into subrepositories present in either context.
    '''
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            # labels are dropped when writing to a plain file object
            fp.write(s)

    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        # diffstat does not need any context lines
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix, relroot=relroot)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix,
                                         relroot=relroot):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.subdirmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1291 1290
1292 1291 def _changesetlabels(ctx):
1293 1292 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1294 1293 if ctx.obsolete():
1295 1294 labels.append('changeset.obsolete')
1296 1295 if ctx.troubled():
1297 1296 labels.append('changeset.troubled')
1298 1297 for trouble in ctx.troubles():
1299 1298 labels.append('trouble.%s' % trouble)
1300 1299 return ' '.join(labels)
1301 1300
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        # matchfn: matcher selecting files for patch/diffstat output, or None
        # buffered: when True, _show() output is captured per revision and
        # only emitted by flush() (callers can then reorder output)
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        self.header = {}  # rev -> buffered header text
        self.hunk = {}    # rev -> buffered changeset text
        self.lastheader = None
        self.footer = None

    def flush(self, ctx):
        # write any buffered output for ctx; returns 1 if a hunk was
        # written, 0 otherwise
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            # only emit the header when it differs from the previous one
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        # public entry point; buffers output per revision when requested
        if self.buffered:
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()
        if self.ui.debugflag:
            hexfunc = hex
        else:
            hexfunc = short
        # as of now, wctx.node() and wctx.rev() return None, but we want to
        # show the same values as {node} and {rev} templatekw
        revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % revnode, label='log.node')
            return

        date = util.datestr(ctx.date())

        # i18n: column positioning for "hg log"
        self.ui.write(_("changeset:   %d:%s\n") % revnode,
                      label=_changesetlabels(ctx))

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            # i18n: column positioning for "hg log"
            self.ui.write(_("branch:      %s\n") % branch,
                          label='log.branch')

        for nsname, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if nsname == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            # i18n: column positioning for "hg log"
            self.ui.write(_("phase:       %s\n") % ctx.phasestr(),
                          label='log.phase')
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            # i18n: column positioning for "hg log"
            self.ui.write(_("parent:      %d:%s\n")
                          % (pctx.rev(), hexfunc(pctx.node())),
                          label=label)

        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            # i18n: column positioning for "hg log"
            self.ui.write(_("manifest:    %d:%s\n") %
                          (self.repo.manifestlog._revlog.rev(mnode),
                           hex(mnode)),
                          label='ui.debug log.manifest')
        # i18n: column positioning for "hg log"
        self.ui.write(_("user:        %s\n") % ctx.user(),
                      label='log.user')
        # i18n: column positioning for "hg log"
        self.ui.write(_("date:        %s\n") % date,
                      label='log.date')

        if ctx.troubled():
            # i18n: column positioning for "hg log"
            self.ui.write(_("trouble:     %s\n") % ', '.join(ctx.troubles()),
                          label='log.trouble')

        if self.ui.debugflag:
            # debug mode: list modified/added/removed files separately
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip([# i18n: column positioning for "hg log"
                                   _("files:"),
                                   # i18n: column positioning for "hg log"
                                   _("files+:"),
                                   # i18n: column positioning for "hg log"
                                   _("files-:")], files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            # i18n: column positioning for "hg log"
            self.ui.write(_("files:       %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            # i18n: column positioning for "hg log"
            self.ui.write(_("copies:      %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                # i18n: column positioning for "hg log"
                self.ui.write(_("extra:       %s=%s\n")
                              % (key, util.escapestr(value)),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # i18n: column positioning for "hg log"
                self.ui.write(_("summary:     %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(ctx, matchfn)

    def showpatch(self, ctx, matchfn):
        # write diffstat and/or patch text for ctx against its first parent,
        # depending on the 'stat'/'patch' diff options
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            node = ctx.node()
            prev = ctx.p1().node()
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                if stat:
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
            self.ui.write("\n")
1477 1476
class jsonchangeset(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        # tracks whether any changeset has been emitted yet, so close() can
        # decide between terminating the array and writing an empty one
        self._first = True

    def close(self):
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        # the working directory has no rev/node; emit JSON null for both
        if rev is None:
            jrev = jnode = 'null'
        else:
            jrev = '%d' % rev
            jnode = '"%s"' % hex(ctx.node())
        j = encoding.jsonescape

        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            self.ui.write(",\n {")

        if self.ui.quiet:
            self.ui.write(('\n  "rev": %s') % jrev)
            self.ui.write((',\n  "node": %s') % jnode)
            self.ui.write('\n }')
            return

        self.ui.write(('\n  "rev": %s') % jrev)
        self.ui.write((',\n  "node": %s') % jnode)
        self.ui.write((',\n  "branch": "%s"') % j(ctx.branch()))
        self.ui.write((',\n  "phase": "%s"') % ctx.phasestr())
        self.ui.write((',\n  "user": "%s"') % j(ctx.user()))
        self.ui.write((',\n  "date": [%d, %d]') % ctx.date())
        self.ui.write((',\n  "desc": "%s"') % j(ctx.description()))

        self.ui.write((',\n  "bookmarks": [%s]') %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write((',\n  "tags": [%s]') %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write((',\n  "parents": [%s]') %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            self.ui.write((',\n  "manifest": %s') % jmanifestnode)

            self.ui.write((',\n  "extra": {%s}') %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            # modified/added/removed relative to the first parent
            files = ctx.p1().status(ctx)
            self.ui.write((',\n  "modified": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write((',\n  "added": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write((',\n  "removed": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            self.ui.write((',\n  "files": [%s]') %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                self.ui.write((',\n  "copies": {%s}') %
                              ", ".join('"%s": "%s"' % (j(k), j(v))
                                        for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                # capture diffstat output into a buffer so it can be escaped
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write((',\n  "diffstat": "%s"')
                              % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write((',\n  "diff": "%s"') % j(self.ui.popbuffer()))

        self.ui.write("\n }")
1576 1575
class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
        # exactly one of tmpl (a template string) or mapfile (a style map
        # file path) may be given
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        assert not (tmpl and mapfile)
        defaulttempl = templatekw.defaulttempl
        if mapfile:
            self.t = templater.templater.frommapfile(mapfile,
                                                     cache=defaulttempl)
        else:
            self.t = formatter.maketemplater(ui, 'changeset', tmpl,
                                             cache=defaulttempl)

        # monotonically increasing index exposed to templates as {index}
        self._counter = itertools.count()
        self.cache = {}

        # find correct templates for current mode
        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        # mode-specific variants (e.g. 'changeset_debug') override the base
        # part name when present in the template map
        self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
                       'docheader': '', 'docfooter': ''}
        for mode, postfix in tmplmodes:
            for t in self._parts:
                cur = t
                if postfix:
                    cur += "_" + postfix
                if mode and cur in self.t:
                    self._parts[t] = cur

        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        props = props.copy()
        props.update(templatekw.keywords)
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['ui'] = self.repo.ui
        props['index'] = next(self._counter)
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache
        props = pycompat.strkwargs(props)

        # write header
        if self._parts['header']:
            h = templater.stringify(self.t(self._parts['header'], **props))
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                # repeated headers are collapsed, matching changeset_printer
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts['changeset']
        self.ui.write(templater.stringify(self.t(key, **props)))
        self.showpatch(ctx, matchfn)

        if self._parts['footer']:
            if not self.footer:
                self.footer = templater.stringify(
                    self.t(self._parts['footer'], **props))
1654 1653
def gettemplate(ui, tmpl, style):
    """Find the template matching the given template spec or style.

    Returns a ``(template, mapfile)`` pair; at most one element is
    non-None.  An explicit template always wins over a style, and both
    fall back to the [ui] configuration when unset.
    """
    if not tmpl and not style:
        # neither given explicitly: consult ui settings, where a configured
        # template is stronger than a configured style
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            return templater.unquotestring(tmpl), None
        style = util.expandpath(ui.config('ui', 'style', ''))

    if tmpl:
        return formatter.lookuptemplate(ui, 'changeset', tmpl)

    if style:
        mapfile = style
        # a bare name refers to a shipped 'map-cmdline.<name>' (or plain
        # <name>) file on the template path
        if not os.path.split(mapfile)[0]:
            found = (templater.templatepath('map-cmdline.' + mapfile)
                     or templater.templatepath(mapfile))
            if found:
                mapfile = found
        return None, mapfile

    return None, None
1681 1680
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # a matcher is only needed when patch text or a diffstat is requested
    if opts.get('patch') or opts.get('stat'):
        match = scmutil.matchall(repo)
    else:
        match = None

    # 'json' is a built-in pseudo template handled by a dedicated printer
    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, match, opts, buffered)

    tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
    if tmpl or mapfile:
        return changeset_templater(ui, repo, match, opts, tmpl, mapfile,
                                   buffered)
    return changeset_printer(ui, repo, match, opts, buffered)
1707 1706
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('precnode', '%s ', hex(marker.precnode()))
    successors = marker.succnodes()
    # only emitted when the marker actually has successors
    fm.condwrite(successors, 'succnodes', '%s ',
                 fm.formatlist(map(hex, successors), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parentset = marker.parentnodes()
    if parentset is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parentset), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    # the date is stored in metadata too; drop it to avoid duplication
    md = marker.metadata().copy()
    md.pop('date', None)
    fm.write('metadata', '{%s}', fm.formatdict(md, fmt='%r: %r', sep=', '))
    fm.plain('\n')
1728 1727
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    datematch = util.matchdate(date)
    matcher = scmutil.matchall(repo)
    hits = {}  # rev -> matching (timestamp, tzoffset) pair

    def prep(ctx, fns):
        when = ctx.date()
        if datematch(when[0]):
            hits[ctx.rev()] = when

    # walkchangerevs yields newest-first, so the first hit is the tipmost
    for ctx in walkchangerevs(repo, matcher, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in hits:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(hits[rev])))
            return '%d' % rev

    raise error.Abort(_("revision matching date not found"))
1749 1748
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes that double each step, capped at sizelimit.

    Generates windowsize, windowsize*2, ... indefinitely; once the value
    reaches sizelimit it is repeated forever.
    """
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size = size * 2
1755 1754
class FileWalkError(Exception):
    """Raised when the file history can't be walked using filelogs alone."""
    pass
1758 1757
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Side effect: populates fncache with {rev: [filename, ...]} entries
    for every wanted revision encountered.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    # (oldname, oldfilenode) pairs discovered via renames; appended to the
    # work queue in iterfiles() below so rename sources are walked too
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            # linkrevs of the parents of this file revision, excluding null
            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yield (filename, filenode-or-None) for each matched file, then
        # for any rename sources accumulated in 'copies' while walking
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                       'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
1855 1854
class _followfilter(object):
    """Incrementally decide whether revisions relate to a start revision.

    The first revision passed to match() becomes the reference point
    (startrev).  Afterwards, revisions greater than startrev are accepted
    when connected to it through parent links (descendants); smaller
    revisions are accepted when reachable backwards through parents
    (ancestors).  With onlyfirst=True only first parents are considered.
    """

    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(r):
            parents = self.repo.changelog.parentrevs(r)
            if self.onlyfirst:
                return parents[0:1]
            return [p for p in parents if p != nullrev]

        if self.startrev == nullrev:
            # first call: remember the reference revision
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: accept descendants of startrev
            if not self.roots:
                self.roots.add(self.startrev)
            if any(p in self.roots for p in realparents(rev)):
                self.roots.add(rev)
                return True
            return False

        # backwards: accept ancestors, replacing each matched rev by
        # its parents in the root set
        if not self.roots:
            self.roots.update(realparents(self.startrev))
        if rev not in self.roots:
            return False
        self.roots.remove(rev)
        self.roots.update(realparents(rev))
        return True
1893 1892
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    # exact/prefix matches with --removed need the slow path: filelogs
    # alone do not let us find the revisions that removed a file
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                               'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                # three states per rev: already known wanted (self.set),
                # not yet examined (self.revs), or known unwanted (neither)
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        # drop the pruned rev and everything the follow filter relates
        # to it, walking backwards down to the start of the rev range
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            # gather up to windowsize wanted revs from the iterator
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # first pass in ascending order: let prepare() collect data
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # second pass in the caller's requested order: yield contexts
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
2031 2030
def _makefollowlogfilematcher(repo, files, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "revfiles". "revfiles" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating revs to file names (which is not "correct" but
    # good enough).
    revfiles = {}          # rev -> set of ancestor file names
    initialized = [False]  # mutable cell: no 'nonlocal' in py2
    pctx = repo['.']

    def _populate():
        for fn in files:
            fctx = pctx[fn]
            revfiles.setdefault(fctx.introrev(), set()).add(fctx.path())
            for anc in fctx.ancestors(followfirst=followfirst):
                revfiles.setdefault(anc.rev(), set()).add(anc.path())

    def filematcher(rev):
        if not initialized[0]:
            # Lazy initialization on first lookup
            initialized[0] = True
            _populate()
        return scmutil.matchfiles(repo, revfiles.get(rev, []))

    return filematcher
2059 2058
2060 2059 def _makenofollowlogfilematcher(repo, pats, opts):
2061 2060 '''hook for extensions to override the filematcher for non-follow cases'''
2062 2061 return None
2063 2062
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # map log option name -> (revset template, joiner used when the
    # option value is a list)
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    # ascending rev range means we follow descendants, not ancestors
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

        # We decided to fall back to the slowpath because at least one
        # of the paths was not a file. Check to see if at least one of them
        # existed in history - in that case, we'll continue down the
        # slowpath; otherwise, we can turn off the slowpath
        if slowpath:
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
        if filematcher is None:
            filematcher = lambda rev: match

    # translate every populated, recognized option into a revset
    # fragment and 'and' them all together
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2215 2214
def _logrevs(repo, opts):
    """Resolve the revision set that log-like commands should visit.

    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    """
    userrevs = opts.get('rev')
    if userrevs:
        return scmutil.revrange(repo, userrevs)
    following = opts.get('follow') or opts.get('follow_first')
    if following:
        if repo.dirstate.p1() == nullid:
            # following from the null revision: nothing to show
            return smartset.baseset()
        return repo.revs('reverse(:.)')
    allrevs = smartset.spanset(repo)
    allrevs.reverse()
    return allrevs
2230 2229
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        if not (revs.isdescending() or revs.istopo()):
            revs.sort(reverse=True)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        # honor --limit by keeping only the first 'limit' revisions
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = smartset.baseset(limitedrevs)

    return revs, expr, filematcher
2261 2260
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        # filter the candidate revisions through the computed revset
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        # honor --limit by keeping only the first 'limit' revisions
        revs = smartset.baseset(list(itertools.islice(revs, limit)))
    return revs, expr, filematcher
2287 2286
def _graphnodeformatter(ui, displayer):
    """Return a function(repo, ctx) rendering the graph node character.

    Uses the ui.graphnodetemplate template when configured, otherwise the
    default {graphnode} keyword implementation.
    """
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        return templatekw.showgraphnode  # fast path for "{graphnode}"

    templ = formatter.gettemplater(ui, 'graphnode',
                                   templater.unquotestring(spec))
    if isinstance(displayer, changeset_templater):
        cache = displayer.cache  # reuse cache of slow templates
    else:
        cache = {}
    props = templatekw.keywords.copy()
    props['templ'] = templ
    props['cache'] = cache

    def formatnode(repo, ctx):
        props['ctx'] = ctx
        props['repo'] = repo
        props['ui'] = repo.ui
        props['revcache'] = {}
        return templater.stringify(templ('graphnode', **props))

    return formatnode
2308 2307
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None):
    # Render the DAG as ASCII graph output: for each (rev, type, ctx,
    # parents) tuple from 'dag', format the node character, collect the
    # changeset text from 'displayer', and draw the edges via 'edgefn'.
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            if not styles[key]:
                styles[key] = None

    # experimental config: experimental.graphshorten
    state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        # rename detection is skipped for the working-directory rev
        # (ctx.rev() is falsy there)
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        # displayer is buffered: show() writes into displayer.hunk[rev],
        # which we pop and split into lines for the graph renderer
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        edges = edgefn(type, char, lines, state, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()
2356 2355
def graphlog(ui, repo, pats, opts):
    """Show revision history alongside an ASCII revision graph.

    Parameters are identical to log command ones.
    """
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        # limit rename detection to the requested revision range, if any
        endrev = None
        userrevs = opts.get('rev')
        if userrevs:
            endrev = scmutil.revrange(repo, userrevs).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
                 filematcher)
2373 2372
def checkunsupportedgraphflags(pats, opts):
    """Abort if any option incompatible with -G/--graph is enabled."""
    incompatible = ["newest_first"]
    for name in incompatible:
        if opts.get(name):
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % name.replace("_", "-"))
2379 2378
def graphrevs(repo, nodes, opts):
    """Return a graphmod node source over 'nodes', newest first.

    Reverses 'nodes' in place (as callers expect) and honors --limit.
    """
    limit = loglimit(opts)
    nodes.reverse()
    shown = nodes if limit is None else nodes[:limit]
    return graphmod.nodes(repo, shown)
2386 2385
def add(ui, repo, match, prefix, explicitonly, **opts):
    # Add files matched by 'match' to the dirstate, recursing into
    # subrepositories.  Returns the list of file names that were bad
    # (rejected or unmatchable).  'prefix' is the path prefix used for
    # reporting names relative to the top-level invocation.
    join = lambda f: os.path.join(prefix, f)
    bad = []

    # record names the matcher reports as bad, then delegate to the
    # matcher's own bad-file handling
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
                                  True, False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                # check for filename case collisions (portability alert)
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            # without --subrepos, only explicitly matched subrepo files
            # are added (explicitonly=True in the recursive call)
            if opts.get(r'subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
2429 2428
def addwebdirpath(repo, serverpath, webconf):
    """Register repo under serverpath in webconf, including subrepos.

    Every subrepository that ever appeared in a .hgsub file is registered
    recursively as well.
    """
    webconf[serverpath] = repo.root
    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))

    for rev in repo.revs('filelog("path:.hgsub")'):
        revctx = repo[rev]
        for subpath in revctx.substate:
            revctx.sub(subpath).addwebdirpath(serverpath, webconf)
2438 2437
def forget(ui, repo, match, prefix, explicitonly):
    # Stop tracking the files matched by 'match' (without deleting them),
    # recursing into subrepositories.  Returns (bad, forgot): names that
    # could not be forgotten and names that were.
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # record names the matcher reports as bad, then delegate to the
    # matcher's own bad-file handling
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # warn about explicitly named files that are not tracked
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2486 2485
def files(ui, ctx, m, fm, fmt, subrepos):
    # List the files in 'ctx' matched by 'm' through formatter 'fm' using
    # format string 'fmt', recursing into subrepos when requested.
    # Returns 0 if at least one file was listed, 1 otherwise.
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        # in the working directory (rev is None), skip files marked
        # removed in the dirstate
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if (subrepos or m.exact(subpath) or any(submatch.files())):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret
2516 2515
def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    # Remove the files matched by 'm' from tracking (and from disk unless
    # 'after'), recursing into subrepos when requested.  Returns 0 on
    # success, 1 if anything was warned about.  'warnings' accumulates
    # warning messages across recursive subrepo calls; only the top-level
    # call (warnings=None) actually emits them at the end.
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                               % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        # tracked files, directories, '.' and subrepo paths are handled
        # elsewhere; only bare untracked names need a warning here
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
            # missing files will generate a warning elsewhere
            ret = 1
    ui.progress(_('deleting'), None)

    # decide which matched files actually get removed from tracking
    if force:
        list = modified + deleted + clean + added
    elif after:
        # --after: only forget files already deleted from disk
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file still exists\n')
                            % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)
    else:
        # default: refuse to remove modified or freshly-added files
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                repo.wvfs.unlinkpath(f, ignoremissing=True)
        repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
2634 2633
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    """Write out the content of every file in ctx matched by matcher.

    Returns 0 if at least one file was written (including via a
    subrepository), 1 otherwise.
    """
    err = 1

    def emit(path):
        # Resolve a per-file output name when a filename template was given.
        outname = None
        if fntemplate:
            outname = makefilename(repo, fntemplate, ctx.node(),
                                   pathname=os.path.join(prefix, path))
        with formatter.maybereopen(basefm, outname, opts) as fm:
            data = ctx[path].data()
            if opts.get('decode'):
                data = repo.wwritedata(path, data)
            fm.startitem()
            fm.write('data', '%s', data)
            fm.data(abspath=path, path=matcher.rel(path))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfnode = ctx.manifestnode()
        if mfnode:
            try:
                if repo.manifestlog[mfnode].find(file)[0]:
                    emit(file)
                    return 0
            except KeyError:
                pass

    for abs in ctx.walk(matcher):
        emit(abs)
        err = 0

    # Recurse into subrepositories; any successful sub.cat clears the error.
    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)
            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path), **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2681 2680
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    if opts.get('date'):
        opts['date'] = util.parsedate(opts.get('date'))
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove') and scmutil.addremove(repo, matcher, "",
                                                   opts) != 0:
        raise error.Abort(
            _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)
2698 2697
def samefile(f, ctx1, ctx2):
    """Return True if file f is identical in ctx1 and ctx2.

    "Identical" means same content and same flags; a file that exists in
    neither context also counts as the same.
    """
    in1 = f in ctx1.manifest()
    in2 = f in ctx2.manifest()
    if in1 != in2:
        return False  # present on only one side
    if not in1:
        return True  # absent from both manifests
    a = ctx1.filectx(f)
    b = ctx2.filectx(f)
    return not a.cmp(b) and a.flags() == b.flags()
2710 2709
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    """Rewrite changeset `old`, folding in current working directory changes.

    First commits working-directory changes (hooks disabled, bookmarks
    parked) as a temporary intermediate changeset, then builds a memctx
    on top of old.p1() combining `old` with those changes and commits it.
    Returns the node of the amended changeset, or old.node() when nothing
    would change.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()
    createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)

    wlock = lock = newid = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        with repo.transaction('amend') as tr:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._bookmarks.active
            try:
                repo._bookmarks.active = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._bookmarks.active = activebookmark
                repo._bookmarks.recordchange(tr)
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            #          |   from working dir to go into amending commit
            #          |   (or a workingctx if there were no changes)
            #          |
            # old      o - changeset to amend
            #          |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                # Fix: the previous code tested `old.p2`, a bound method that
                # is always truthy; only changesets with a real (non-null)
                # second parent have p2 copies worth chasing.
                if old.p2().node() != nullid:
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())
                files = [f for f in files if not samefile(f, ctx, base)]

                def filectxfn(repo, ctx_, path):
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        # path removed in the intermediate commit
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

                user = opts.get('user') or old.user()
                date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This not what we expect from amend.
                return old.node()

            # Commit under the phase of the original changeset (or 'secret'
            # when requested), restoring the configured default afterwards.
            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        ui.debug('moving bookmarks %r from %s to %s\n' %
                                 (marks, old.hex(), hex(newid)))
                        marks[bm] = newid
                    marks.recordchange(tr)
        #commit the whole amend process
        if createmarkers:
            # mark the new changeset as successor of the rewritten one
            new = repo[newid]
            obs = [(old, (new,))]
            if node:
                obs.append((ctx, ()))

            obsolete.createmarkers(repo, obs, operation='amend')
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        lockmod.release(lock, wlock)
    return newid
2893 2892
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's description, invoking an editor only when it is empty."""
    desc = ctx.description()
    if desc:
        return desc
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
2899 2898
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Run the user's editor to obtain a commit message for ctx.

    The initial editor text comes from the most specific matching
    'committemplate' config (walking editform components from most to
    least specific), falling back to buildcommittext().  Raises
    error.Abort on an empty message, or on an unchanged templated
    message when unchangedmessagedetection is set.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # Look up committemplate.<changeset.form.components>, dropping the last
    # component until a configured template is found.
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            tmpl = templater.unquotestring(tmpl)
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path)
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    # drop the 'HG:' comment lines that were part of the template
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
2950 2949
def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    """Render the commit-editor text for ctx through the given template."""
    ui = repo.ui
    tmpl, mapfile = gettemplate(ui, tmpl, None)

    templ = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)

    # expose every other committemplate.* key to the template engine
    for key, value in ui.configitems('committemplate'):
        if key != 'changeset':
            templ.t.cache[key] = value

    ui.pushbuffer()
    # extramsg must be a string, never None
    templ.show(ctx, extramsg=extramsg or '')
    return ui.popbuffer()
2967 2966
def hgprefix(msg):
    """Prefix every non-empty line of msg with 'HG: ', dropping blank lines."""
    prefixed = ("HG: %s" % line for line in msg.split("\n") if line)
    return "\n".join(prefixed)
2970 2969
def buildcommittext(repo, ctx, subs, extramsg):
    """Assemble the default (non-templated) commit-editor text for ctx.

    The text is the existing description (if any), a separator, and
    'HG:' comment lines describing user, merge/branch/bookmark state,
    subrepos, and the added/changed/removed files.
    """
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    lines = []
    add = lines.append
    if ctx.description():
        add(ctx.description())
    add("")
    add("")  # Empty line between message and comments.
    add(hgprefix(_("Enter commit message."
                   " Lines beginning with 'HG:' are removed.")))
    add(hgprefix(extramsg))
    add("HG: --")
    add(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        add(hgprefix(_("branch merge")))
    if ctx.branch():
        add(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        add(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    lines.extend(hgprefix(_("subrepo %s") % s) for s in subs)
    lines.extend(hgprefix(_("added %s") % f) for f in added)
    lines.extend(hgprefix(_("changed %s") % f) for f in modified)
    lines.extend(hgprefix(_("removed %s") % f) for f in removed)
    if not (added or modified or removed):
        add(hgprefix(_("no files changed")))
    add("")

    return "\n".join(lines)
2998 2997
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Emit user-facing status messages after a commit has been created."""
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    newhead = (not opts.get('amend')
               and bheads
               and node not in bheads
               and not any(p.node() in bheads and p.branch() == branch
                           for p in parents))
    if newhead:
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for p in parents:
            if p.closesbranch() and p.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % p)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3046 3045
def postcommitstatus(repo, pats, opts):
    """Return the working directory status restricted to the given patterns."""
    matcher = scmutil.match(repo[None], pats, opts)
    return repo.status(match=matcher)
3049 3048
def revert(ui, repo, ctx, parents, *pats, **opts):
    """Restore files matched by pats to their state in target revision ctx.

    parents is the (p1, p2) node pair of the working directory parents.
    The work happens in three stages: build the `names` mapping of every
    candidate path, classify each path into an `actions` bucket through
    the dispatch table, then let _performrevert() apply the buckets.
    """
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        parent = p2

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    with repo.wlock():
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            matcher = matchmod.badmatch(m, lambda x, y: False)
            for abs in wctx.walk(matcher):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                # silence warnings for paths already found, subrepos, or
                # directories that contain already-found files
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find status of all files in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinguish between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may need backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            mergeadd = set(dsmodified)
            for path in dsmodified:
                if path in mf:
                    mergeadd.remove(path)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # determine the exact nature of the deleted changesets
        deladded = set(_deleted)
        for path in _deleted:
            if path in mf:
                deladded.remove(path)
        deleted = _deleted - deladded

        # distinguish between files to forget and the others
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present at
        # the same path. If such a file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of file>, <message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                  }

        # "constants" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set, to avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backupinteractive = 3 # do backup if interactively modified
        backup = 2  # unconditionally do backup
        check = 1  # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backupinteractive = backup = check = discard
        if interactive:
            dsmodifiedbackup = backupinteractive
        else:
            dsmodifiedbackup = backup
        tobackup = set()

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets whose results will change files on disk
            # Modified compared to target, no local change
            (modified, actions['revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions['revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions['revert'], dsmodifiedbackup),
            # Added since target
            (added, actions['remove'], discard),
            # Added in working directory
            (dsadded, actions['forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions['drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions['add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions['add'], check),
            # Removed since target, marked as such in working copy parent
            (dsremoved, actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions['undelete'], check),
            ## the following sets do not result in any file changes
            # File with no modification
            (clean, actions['noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions['unknown'], discard),
            )

        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touched on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup:
                        # If in interactive mode, don't automatically create
                        # .orig files (issue4793)
                        if dobackup == backupinteractive:
                            tobackup.add(abs)
                        elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
                            bakname = scmutil.origpath(ui, repo, rel)
                            ui.note(_('saving current version of %s as %s\n') %
                                    (rel, bakname))
                            if not opts.get('dry_run'):
                                if interactive:
                                    util.copyfile(target, bakname)
                                else:
                                    util.rename(target, bakname)
                    if ui.verbose or not exact:
                        if not isinstance(msg, basestring):
                            msg = msg(abs)
                        ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
            _performrevert(repo, parents, ctx, actions, interactive, tobackup)

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
                except KeyError:
                    raise error.Abort("subrepository '%s' does not exist in %s!"
                                      % (sub, short(ctx.node())))
3332 3331
3333 3332 def _revertprefetch(repo, ctx, *files):
3334 3333 """Let extension changing the storage layer prefetch content"""
3335 3334 pass
3336 3335
def _performrevert(repo, parents, ctx, actions, interactive=False,
                   tobackup=None):
    """function that actually performs all the actions computed for revert

    This is an independent function to let extensions plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.
    """
    parent, p2 = parents
    node = ctx.node()
    # files the user opted out of in interactive mode; passed as matcher
    # excludes so later diff/patch stages skip them
    excluded_files = []
    matcher_opts = {"exclude": excluded_files}

    def checkout(f):
        # write f's content and flags from ctx into the working directory
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        # best-effort unlink, then mark removed in the dirstate
        try:
            repo.wvfs.unlinkpath(f)
        except OSError:
            pass
        repo.dirstate.remove(f)

    audit_path = pathutil.pathauditor(repo.root)
    for f in actions['forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                repo.dirstate.drop(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                doremove(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            doremove(f)
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, matcher_opts)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        operation = 'discard'
        reversehunks = True
        if node != parent:
            operation = 'revert'
            reversehunks = repo.ui.configbool('experimental',
                'revertalternateinteractivemode',
                True)
        # either diff target->wdir and reverse the selected hunks, or diff
        # wdir->target and apply them directly
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(repo.ui, originalchunks,
                                        operation=operation)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        for c in chunks:
            # Create a backup file only if this hunk should be backed up
            if ishunk(c) and c.header.filename() in tobackup:
                abs = c.header.filename()
                target = repo.wjoin(abs)
                bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
                util.copyfile(target, bakname)
                tobackup.remove(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except patch.PatchError as err:
                raise error.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    # re-record copy information against the revert target
    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3475 3474
class command(registrar.command):
    """registrar.command subclass that marks commands registered through it.

    The ``_deprecatedregistrar`` flag set here is read by extensions.py so it
    can emit a deprecation warning for extensions still using cmdutil.command.
    """
    def _doregister(self, func, name, *args, **kwargs):
        func._deprecatedregistrar = True  # flag for deprecwarn in extensions.py
        return super(command, self)._doregister(func, name, *args, **kwargs)
3480 3479
3481 3480 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3482 3481 # commands.outgoing. "missing" is "missing" of the result of
3483 3482 # "findcommonoutgoing()"
3484 3483 outgoinghooks = util.hooks()
3485 3484
3486 3485 # a list of (ui, repo) functions called by commands.summary
3487 3486 summaryhooks = util.hooks()
3488 3487
3489 3488 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3490 3489 #
3491 3490 # functions should return tuple of booleans below, if 'changes' is None:
3492 3491 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3493 3492 #
3494 3493 # otherwise, 'changes' is a tuple of tuples below:
3495 3494 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3496 3495 # - (desturl, destbranch, destpeer, outgoing)
3497 3496 summaryremotehooks = util.hooks()
3498 3497
3499 3498 # A list of state files kept by multistep operations like graft.
3500 3499 # Since graft cannot be aborted, it is considered 'clearable' by update.
3501 3500 # note: bisect is intentionally excluded
3502 3501 # (state file, clearable, allowcommit, error, hint)
3503 3502 unfinishedstates = [
3504 3503 ('graftstate', True, False, _('graft in progress'),
3505 3504 _("use 'hg graft --continue' or 'hg update' to abort")),
3506 3505 ('updatestate', True, False, _('last update was interrupted'),
3507 3506 _("use 'hg update' to get a consistent checkout"))
3508 3507 ]
3509 3508
def checkunfinished(repo, commit=False):
    '''Abort if a multistep operation, like graft, is unfinished. It's
    probably good to check this right before bailifchanged().
    '''
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        # a state that allows committing is fine when we are about to commit
        if commit and allowcommit:
            continue
        if repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
3520 3519
def clearunfinished(repo):
    '''Check for unfinished operations (as above), and clear the ones
    that are clearable.
    '''
    # first pass: abort if any non-clearable state file is present
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if not clearable and repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
    # second pass: remove every clearable state file that exists
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if clearable and repo.vfs.exists(statefile):
            util.unlink(repo.vfs.join(statefile))
3531 3530
3532 3531 afterresolvedstates = [
3533 3532 ('graftstate',
3534 3533 _('hg graft --continue')),
3535 3534 ]
3536 3535
def howtocontinue(repo):
    '''Check for an unfinished operation and return the command to finish
    it.

    afterresolvedstates tuples define a .hg/{file} and the corresponding
    command needed to finish it.

    Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
    a boolean.
    '''
    template = _("continue: %s")
    for statefile, cmd in afterresolvedstates:
        if repo.vfs.exists(statefile):
            return template % cmd, True
    wctx = repo[None]
    # dirty working directory (including dirty subrepos) suggests a commit
    if any(repo.status()) or any(wctx.sub(s).dirty() for s in wctx.substate):
        return template % _("hg commit"), False
    return None, None
3557 3556
def checkafterresolved(repo):
    '''Inform the user about the next action after completing hg resolve

    If there's a matching afterresolvedstates, howtocontinue will yield
    repo.ui.warn as the reporter.

    Otherwise, it will yield repo.ui.note.
    '''
    msg, warning = howtocontinue(repo)
    if msg is None:
        return
    reporter = repo.ui.warn if warning else repo.ui.note
    reporter("%s\n" % msg)
3572 3571
def wrongtooltocontinue(repo, task):
    '''Raise an abort suggesting how to properly continue if there is an
    active task.

    Uses howtocontinue() to find the active task.

    If there's no task (repo.ui.note for 'hg commit'), it does not offer
    a hint.
    '''
    msg, warning = howtocontinue(repo)
    # only unfinished-state messages (warning=True) are worth hinting about
    hint = msg if warning else None
    raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,983 +1,994 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16
17 17 from .i18n import _
18 from .node import wdirrev
18 from .node import (
19 wdirid,
20 wdirrev,
21 )
22
19 23 from . import (
20 24 encoding,
21 25 error,
22 26 match as matchmod,
23 27 pathutil,
24 28 phases,
25 29 pycompat,
26 30 revsetlang,
27 31 similar,
28 32 util,
29 33 )
30 34
31 35 if pycompat.osname == 'nt':
32 36 from . import scmwindows as scmplatform
33 37 else:
34 38 from . import scmposix as scmplatform
35 39
36 40 termsize = scmplatform.termsize
37 41
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
90 94
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a (subpath, ctx) mapping preferring subpaths from ctx1. The
    # subpaths from ctx2 matter when the .hgsub file has been modified
    # (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # subpaths present only in ctx2 are handled separately below
    missing = set(s for s in ctx2.substate if s not in ctx1.substate)
    for subpath in missing:
        del subpaths[subpath]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
115 119
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    for n in (excluded or []):
        ctx = repo[n]
        # only report changesets that are secret and still alive
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
132 136
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # record the traceback (shown with --traceback) before the
            # handlers below turn the exception into a message
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
               (inst.desc or inst.filename, inst.strerror))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        # the only Mercurial error that maps to exit code 1 instead of -1
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        return 1
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        # the missing module name is the last word of the message
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # HTTPError-like
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            # URLError-like
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe (e.g. output piped to a pager that quit): stay quiet
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
            else:
                ui.warn(_("abort: %s\n") % inst.strerror)
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
        else:
            ui.warn(_("abort: %s\n") % inst.strerror)
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    # -1 signals a handled failure to the caller
    return -1
238 242
def checknewlabel(repo, lbl, kind):
    """Abort if lbl is not acceptable as a new bookmark/branch/tag name."""
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
    except ValueError:
        return
    raise error.Abort(_("cannot use an integer as a name"))
252 256
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for badchar in ('\r', '\n'):
        if badchar in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
257 261
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %r" % (msg, f)
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
269 273
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lval = val.lower()
    bval = util.parsebool(val)
    # Windows always aborts; elsewhere only when configured to
    abort = lval == 'abort' or pycompat.osname == 'nt'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
282 286
class casecollisionauditor(object):
    """Detect new filenames that case-fold-collide with tracked ones.

    Called with each filename about to be added; warns, or aborts when
    ``abort`` is set, if the lowercased name already exists among the
    lowercased dirstate entries.
    """
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # lowercase all tracked names in a single pass by joining on NUL
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        # check filename f; record it so repeats are not re-reported
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
306 310
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = hashlib.sha1()
    for rev in revs:
        s.update('%d;' % rev)
    return s.digest()
330 334
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only errors on the root path itself are fatal
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # return True (and record the stat) only for unseen directories;
            # matching an already-seen stat means a symlink cycle
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # without samestat we cannot detect symlink cycles, so don't follow
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk the link target ourselves, sharing seen_dirs
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
378 382
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # the working directory has no real node; use the dummy wdirid instead
    node = ctx.node()
    return wdirid if node is None else node
389
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # the working directory has rev None; map it to the dummy wdirrev
    rev = ctx.rev()
    return wdirrev if rev is None else rev
386 397
def revsingle(repo, revspec, default='.'):
    """Resolve a single revision spec, falling back to ``default`` when the
    spec is empty (but 0 is a valid revision)."""
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec])
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
395 406
def _pairspec(revspec):
    """True if revspec parses to a top-level range expression."""
    tree = revsetlang.parse(revspec)
    if not tree:
        return tree
    return tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
399 410
def revpair(repo, revs):
    """Resolve user revision specs to a pair of nodes.

    Returns (first, second); second is None when the specs name a single
    revision that was not written as a range expression.
    """
    if not revs:
        # no spec: working directory parent, no second revision
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # pick the endpoints without materializing the whole smartset when its
    # ordering is known
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        # e.g. "-r x -r y" where one of the revsets resolved to nothing
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
429 440
def revrange(repo, specs):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # bare integers are turned into explicit rev() expressions
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True)
457 468
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) <= 1:
        if repo.ui.debugflag:
            # in debug mode always show both parents, padding with null
            return [parents[0], repo['null']]
        if parents[0].rev() >= intrev(ctx) - 1:
            # the parent immediately precedes ctx: not meaningful
            return []
    return parents
473 484
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicit pattern kinds (e.g. "re:", "glob:") pass through
            expanded.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        # an empty glob result keeps the original pattern
        expanded.extend(globbed if globbed else [kindpat])
    return expanded
492 503
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    if badfn is None:
        def badfn(f, msg):
            # closes over m, which is bound below before any match runs
            ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats
517 528
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, _pats = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return m
522 533
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
526 537
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
530 541
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath', None)
    if origbackuppath is None:
        return filepath + ".orig"

    # mirror the file's repo-relative path under the configured backup dir
    relpath = os.path.relpath(filepath, start=repo.root)
    fullorigpath = repo.wjoin(origbackuppath, relpath)

    backupdir = repo.vfs.dirname(fullorigpath)
    if not repo.vfs.exists(backupdir):
        ui.note(_('creating directory: %s\n') % backupdir)
        util.makedirs(backupdir)

    return fullorigpath + ".orig"
550 561
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    """Add new files and forget missing ones, detecting renames by similarity.

    Returns 0 on success, 1 if an explicitly named file was rejected or a
    subrepo addremove failed.
    """
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    # recurse into matching subrepositories first
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # warn only for files the user named explicitly; remember all rejects
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # report what will be added (unknown/forgotten) and removed (deleted)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # a rejected file that was explicitly requested is a hard failure
    for f in rejected:
        if f in m.files():
            return 1
    return ret
606 617
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # rejected is filled by the badfn callback while the matcher walks
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # 1 if any of the requested files could not be processed
    for f in rejected:
        if f in m.files():
            return 1
    return 0
635 646
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns (added, unknown, deleted, removed, forgotten) lists of paths.
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    # dstate is the single-letter dirstate status; st is the on-disk stat
    # (falsy when the file is missing from the working directory)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # untracked and on a safe path
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but gone from disk
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but still present on disk
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
664 675
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        if (repo.ui.verbose or not matcher.exact(old)
            or not matcher.exact(new)):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
679 690
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for dst, src in renames.iteritems():
            wctx.copy(src, dst)
689 700
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy record so chained copies point at the origin
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # 'mn' = modified/normal dirstate states
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # source was only added, never committed: no copy data to record
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
708 719
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # a blank or non-alphanumeric-leading entry means a corrupt file
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    missings.sort()
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
727 738
def writerequires(opener, requirements):
    """Write the requirements, sorted one per line, to .hg/requires."""
    with opener('requires', 'w') as fp:
        fp.writelines("%s\n" % r for r in sorted(requirements))
732 743
class filecachesubentry(object):
    """Stat-based change detector for a single path under .hg/."""
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # remember the current stat so later changed() calls compare to it
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None when the path does not exist; other errors propagate
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
787 798
class filecacheentry(object):
    """Aggregates filecachesubentry objects for a multi-file cache entry."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
804 815
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial uses either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # relative paths of the files backing the cached value; resolved
        # to real paths via join() at access time
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # used as a decorator: remember the wrapped function and its name;
        # the name (as ascii bytes) keys both obj.__dict__ and obj._filecache
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            # fast path: value already materialized on the instance;
            # invariant: X in __dict__ implies X in _filecache
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            # existing entry: recompute the value only if a backing file
            # changed since the recorded stat
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            # (stat=False: no stat data yet, so the next read re-stats)
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # drop only the materialized value; the _filecache entry survives so
        # the next __get__ can compare stat info
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
883 894
884 895 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
885 896 if lock is None:
886 897 raise error.LockInheritanceContractViolation(
887 898 'lock can only be inherited while held')
888 899 if environ is None:
889 900 environ = {}
890 901 with lock.inherit() as locker:
891 902 environ[envvar] = locker
892 903 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
893 904
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
902 913
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta', False):
        return True
    return ui.configbool('format', 'usegeneraldelta', True)
909 920
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta', False)
    return enabled
915 926
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Parse the file contents into a dict.

        When 'firstlinenonkeyval' is true the first line is not parsed as a
        key-value pair; it is returned verbatim (minus the trailing newline)
        under the __firstline key instead."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines.pop(0)[:-1]

        try:
            # 'if line.strip()' skips lines that are empty or contain only
            # '\n', which a bare 'if line' would let through
            parsed = dict(line[:-1].split('=', 1)
                          for line in lines if line.strip())
            if self.firstlinekey in parsed:
                raise error.CorruptedState(
                    _("%r can't be used as a key") % self.firstlinekey)
            d.update(parsed)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                raise error.ProgrammingError(
                    "key name '%s' is reserved" % self.firstlinekey)
            if not k[0].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not k.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in v:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
@@ -1,671 +1,676 b''
1 1 # templatekw.py - common changeset template keywords
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 from .i18n import _
11 from .node import hex, nullid
11 from .node import (
12 hex,
13 nullid,
14 short,
15 )
16
12 17 from . import (
13 18 encoding,
14 19 error,
15 20 hbisect,
16 21 patch,
17 22 registrar,
18 23 scmutil,
19 24 util,
20 25 )
21 26
class _hybrid(object):
    """Wrapper for list or dict to support legacy template

    This class allows us to handle both:
    - "{files}" (legacy command-line-specific list hack) and
    - "{files % '{file}\n'}" (hgweb-style with inlining and function support)
    and to access raw values:
    - "{ifcontains(file, files, ...)}", "{ifcontains(key, extras, ...)}"
    - "{get(extras, key)}"
    - "{files|json}"
    """

    def __init__(self, gen, values, makemap, joinfmt):
        # an explicit gen shadows the propertycache-computed default below
        if gen is not None:
            self.gen = gen
        self._values = values
        self._makemap = makemap
        self.joinfmt = joinfmt
    @util.propertycache
    def gen(self):
        # lazy default used only when no gen was passed to __init__
        return self._defaultgen()
    def _defaultgen(self):
        """Generator to stringify this as {join(self, ' ')}"""
        for i, d in enumerate(self.itermaps()):
            if i > 0:
                yield ' '
            yield self.joinfmt(d)
    def itermaps(self):
        # yield one template mapping per underlying value
        makemap = self._makemap
        for x in self._values:
            yield makemap(x)
    def __contains__(self, x):
        return x in self._values
    def __len__(self):
        return len(self._values)
    def __iter__(self):
        return iter(self._values)
    def __getattr__(self, name):
        # delegate only the dict/list accessor methods to the raw values;
        # anything else is an error so bugs don't silently fall through
        if name not in ('get', 'items', 'iteritems', 'iterkeys', 'itervalues',
                        'keys', 'values'):
            raise AttributeError(name)
        return getattr(self._values, name)
64 69
def hybriddict(data, key='key', value='value', fmt='%s=%s', gen=None):
    """Wrap data to support both dict-like and string-like operations"""
    def makemap(k):
        return {key: k, value: data[k]}
    def joinfmt(d):
        return fmt % (d[key], d[value])
    return _hybrid(gen, data, makemap, joinfmt)
69 74
def hybridlist(data, name, fmt='%s', gen=None):
    """Wrap data to support both list-like and string-like operations"""
    def makemap(x):
        return {name: x}
    return _hybrid(gen, data, makemap, lambda d: fmt % d[name])
73 78
def unwraphybrid(thing):
    """Return an object which can be stringified possibly by using a legacy
    template"""
    if util.safehasattr(thing, 'gen'):
        return thing.gen
    return thing
80 85
def showdict(name, data, mapping, plural=None, key='key', value='value',
             fmt='%s=%s', separator=' '):
    """Expand a dict as a hybrid of {key}/{value} template mappings."""
    entries = [{key: k, value: v} for k, v in data.iteritems()]
    gen = _showlist(name, entries, mapping, plural, separator)
    return hybriddict(data, key=key, value=value, fmt=fmt, gen=gen)
86 91
def showlist(name, values, mapping, plural=None, element=None, separator=' '):
    """Expand a list of values as a hybrid, one mapping per element."""
    gen = _showlist(name, values, mapping, plural, separator)
    return hybridlist(values, name=element or name, gen=gen)
92 97
def _showlist(name, values, mapping, plural=None, separator=' '):
    '''expand set of values.
    name is name of key in template map.
    values is list of strings or dicts.
    plural is plural of name, if not simply name + 's'.
    separator is used to join values as a string

    expansion works like this, given name 'foo'.

    if values is empty, expand 'no_foos'.

    if 'foo' not in template map, return values as a string,
    joined by 'separator'.

    expand 'start_foos'.

    for each value, expand 'foo'. if 'last_foo' in template
    map, expand it instead of 'foo' for last key.

    expand 'end_foos'.
    '''
    templ = mapping['templ']
    if not plural:
        plural = name + 's'
    if not values:
        # empty list: expand only the optional 'no_foos' template
        noname = 'no_' + plural
        if noname in templ:
            yield templ(noname, **mapping)
        return
    if name not in templ:
        # no per-item template defined: fall back to plain joining
        # (strings) or raw mappings (dicts)
        if isinstance(values[0], str):
            yield separator.join(values)
        else:
            for v in values:
                yield dict(v, **mapping)
        return
    startname = 'start_' + plural
    if startname in templ:
        yield templ(startname, **mapping)
    # one shared mapping is reused (and overwritten) for each item
    vmapping = mapping.copy()
    def one(v, tag=name):
        # merge v into vmapping whatever its shape: dict, list of pairs,
        # or a plain scalar stored under 'name'
        try:
            vmapping.update(v)
        except (AttributeError, ValueError):
            try:
                for a, b in v:
                    vmapping[a] = b
            except ValueError:
                vmapping[name] = v
        return templ(tag, **vmapping)
    lastname = 'last_' + name
    if lastname in templ:
        # note: pops from the caller's list so the loop below skips it
        last = values.pop()
    else:
        last = None
    for v in values:
        yield one(v)
    if last is not None:
        yield one(last, tag=lastname)
    endname = 'end_' + plural
    if endname in templ:
        yield templ(endname, **mapping)
155 160
def _formatrevnode(ctx):
    """Format changeset as '{rev}:{node|formatnode}', which is the default
    template provided by cmdutil.changeset_templater"""
    # SOURCE held diff residue mixing the pre- and post-change bodies;
    # this is the post-change version: full hex node in debug mode,
    # short (12-digit) node otherwise, both derived via scmutil.binnode
    # so workingctx gets a usable node too.
    repo = ctx.repo()
    if repo.ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return '%d:%s' % (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
165 170
def getfiles(repo, ctx, revcache):
    """Return (modified, added, removed) for ctx, memoized in revcache."""
    try:
        return revcache['files']
    except KeyError:
        files = repo.status(ctx.p1(), ctx)[:3]
        revcache['files'] = files
        return files
170 175
def getlatesttags(repo, ctx, cache, pattern=None):
    '''return date, distance and name for the latest tag of rev'''

    # results are memoized per pattern on the templater-wide cache
    cachename = 'latesttags'
    if pattern is not None:
        cachename += '-' + pattern
        match = util.stringmatcher(pattern)[2]
    else:
        match = util.always

    if cachename not in cache:
        # Cache mapping from rev to a tuple with tag date, tag
        # distance and tag name
        cache[cachename] = {-1: (0, 0, ['null'])}
    latesttags = cache[cachename]

    # iterative worklist walk of ancestors (avoids recursion); a rev is
    # re-pushed below its parents on a cache miss, so parents are always
    # resolved first and the requested rev is resolved last
    rev = ctx.rev()
    todo = [rev]
    while todo:
        rev = todo.pop()
        if rev in latesttags:
            continue
        ctx = repo[rev]
        # only global (non-local) tags matching the pattern count
        tags = [t for t in ctx.tags()
                if (repo.tagtype(t) and repo.tagtype(t) != 'local'
                    and match(t))]
        if tags:
            latesttags[rev] = ctx.date()[0], 0, [t for t in sorted(tags)]
            continue
        try:
            # The tuples are laid out so the right one can be found by
            # comparison.
            pdate, pdist, ptag = max(
                latesttags[p.rev()] for p in ctx.parents())
        except KeyError:
            # Cache miss - recurse
            todo.append(rev)
            todo.extend(p.rev() for p in ctx.parents())
            continue
        latesttags[rev] = pdate, pdist + 1, ptag
    return latesttags[rev]
212 217
def getrenamedfn(repo, endrev=None):
    # closure-scoped cache: {filename: {changerev: renamed-info}}
    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # first sighting of fn: walk its whole filelog once and
            # record rename info per linked changelog revision
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].renamed()
        except error.LookupError:
            return None

    return getrenamed
243 248
# default templates internally used for rendering of lists
defaulttempl = {
    'parent': '{rev}:{node|formatnode} ',
    'manifest': '{rev}:{node|formatnode}',
    'file_copy': '{name} ({source})',
    'envvar': '{key}={value}',
    'extra': '{key}={value|stringescape}'
}
# filecopy is preserved for compatibility reasons
defaulttempl['filecopy'] = defaulttempl['file_copy']

# keywords are callables like:
# fn(repo, ctx, templ, cache, revcache, **args)
# with:
# repo - current repository instance
# ctx - the changectx being displayed
# templ - the templater instance
# cache - a cache dictionary for the whole templater run
# revcache - a cache dictionary for the current revision
keywords = {}

# decorator that registers a function into the keywords table above
templatekeyword = registrar.templatekeyword(keywords)
266 271
@templatekeyword('author')
def showauthor(repo, ctx, templ, **args):
    """String. The unmodified author of the changeset."""
    user = ctx.user()
    return user
271 276
@templatekeyword('bisect')
def showbisect(repo, ctx, templ, **args):
    """String. The changeset bisection status."""
    node = ctx.node()
    return hbisect.label(repo, node)
276 281
@templatekeyword('branch')
def showbranch(**args):
    """String. The name of the branch on which the changeset was
    committed.
    """
    ctx = args['ctx']
    return ctx.branch()
283 288
@templatekeyword('branches')
def showbranches(**args):
    """List of strings. The name of the branch on which the
    changeset was committed. Will be empty if the branch name was
    default. (DEPRECATED)
    """
    branch = args['ctx'].branch()
    values = [branch] if branch != 'default' else []
    return showlist('branch', values, args, plural='branches')
294 299
@templatekeyword('bookmarks')
def showbookmarks(**args):
    """List of strings. Any bookmarks associated with the
    changeset. Also sets 'active', the name of the active bookmark.
    """
    ctx = args['ctx']
    repo = ctx._repo
    marks = ctx.bookmarks()
    active = repo._activebookmark
    def makemap(v):
        # 'current' kept as a legacy alias of 'active'
        return {'bookmark': v, 'active': active, 'current': active}
    f = _showlist('bookmark', marks, args)
    return _hybrid(f, marks, makemap, lambda x: x['bookmark'])
306 311
@templatekeyword('children')
def showchildren(**args):
    """List of strings. The children of the changeset."""
    ctx = args['ctx']
    childrevs = ['%d:%s' % (child, child) for child in ctx.children()]
    return showlist('children', childrevs, args, element='child')
313 318
# Deprecated, but kept alive for help generation purposes.
@templatekeyword('currentbookmark')
def showcurrentbookmark(**args):
    """String. The active bookmark, if it is
    associated with the changeset (DEPRECATED)"""
    return showactivebookmark(**args)
320 325
@templatekeyword('activebookmark')
def showactivebookmark(**args):
    """String. The active bookmark, if it is
    associated with the changeset"""
    active = args['repo']._activebookmark
    if not active:
        return ''
    if active not in args['ctx'].bookmarks():
        return ''
    return active
329 334
@templatekeyword('date')
def showdate(repo, ctx, templ, **args):
    """Date information. The date when the changeset was committed."""
    when = ctx.date()
    return when
334 339
@templatekeyword('desc')
def showdescription(repo, ctx, templ, **args):
    """String. The text of the changeset description."""
    s = ctx.description()
    if not isinstance(s, encoding.localstr):
        return s.strip()
    # try hard to preserve utf-8 bytes
    return encoding.tolocal(encoding.fromlocal(s).strip())
344 349
@templatekeyword('diffstat')
def showdiffstat(repo, ctx, templ, **args):
    """String. Statistics of changes with the following format:
    "modified files: +added/-removed lines"
    """
    lines = util.iterlines(ctx.diff(noprefix=False))
    stats = patch.diffstatdata(lines)
    maxname, maxtotal, adds, removes, binary = patch.diffstatsum(stats)
    return '%s: +%s/-%s' % (len(stats), adds, removes)
353 358
@templatekeyword('envvars')
def showenvvars(repo, **args):
    """A dictionary of environment variables. (EXPERIMENTAL)"""
    env = repo.ui.exportableenviron()
    sortedenv = util.sortdict((k, env[k]) for k in sorted(env))
    return showdict('envvar', sortedenv, args, plural='envvars')
360 365
@templatekeyword('extras')
def showextras(**args):
    """List of dicts with key, value entries of the 'extras'
    field of this changeset."""
    extras = args['ctx'].extra()
    extras = util.sortdict((k, extras[k]) for k in sorted(extras))
    def makemap(k):
        return {'key': k, 'value': extras[k]}
    c = [makemap(k) for k in extras]
    f = _showlist('extra', c, args, plural='extras')
    def joinfmt(x):
        return '%s=%s' % (x['key'], util.escapestr(x['value']))
    return _hybrid(f, extras, makemap, joinfmt)
372 377
@templatekeyword('file_adds')
def showfileadds(**args):
    """List of strings. Files added by this changeset."""
    repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
    added = getfiles(repo, ctx, revcache)[1]
    return showlist('file_add', added, args, element='file')
379 384
@templatekeyword('file_copies')
def showfilecopies(**args):
    """List of strings. Files copied in this changeset with
    their sources.
    """
    cache, ctx = args['cache'], args['ctx']
    copies = args['revcache'].get('copies')
    if copies is None:
        # no precomputed copies: resolve renames lazily, memoizing the
        # rename resolver on the templater-wide cache
        if 'getrenamed' not in cache:
            cache['getrenamed'] = getrenamedfn(args['repo'])
        getrenamed = cache['getrenamed']
        copies = []
        for fname in ctx.files():
            rename = getrenamed(fname, ctx.rev())
            if rename:
                copies.append((fname, rename[0]))

    return showdict('file_copy', util.sortdict(copies), args,
                    plural='file_copies', key='name', value='source',
                    fmt='%s (%s)')
400 405
# showfilecopiesswitch() displays file copies only if copy records are
# provided before calling the templater, usually with a --copies
# command line switch.
@templatekeyword('file_copies_switch')
def showfilecopiesswitch(**args):
    """List of strings. Like "file_copies" but displayed
    only if the --copied switch is set.
    """
    copies = util.sortdict(args['revcache'].get('copies') or [])
    return showdict('file_copy', copies, args, plural='file_copies',
                    key='name', value='source', fmt='%s (%s)')
413 418
@templatekeyword('file_dels')
def showfiledels(**args):
    """List of strings. Files removed by this changeset."""
    repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
    removed = getfiles(repo, ctx, revcache)[2]
    return showlist('file_del', removed, args, element='file')
420 425
@templatekeyword('file_mods')
def showfilemods(**args):
    """List of strings. Files modified by this changeset."""
    repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
    modified = getfiles(repo, ctx, revcache)[0]
    return showlist('file_mod', modified, args, element='file')
427 432
@templatekeyword('files')
def showfiles(**args):
    """List of strings. All files modified, added, or removed by this
    changeset.
    """
    files = args['ctx'].files()
    return showlist('file', files, args)
434 439
@templatekeyword('graphnode')
def showgraphnode(repo, ctx, **args):
    """String. The character representing the changeset node in
    an ASCII revision graph"""
    wpnodes = repo.dirstate.parents()
    if wpnodes[1] == nullid:
        # not a merge in progress: only the first parent matters
        wpnodes = wpnodes[:1]
    if ctx.node() in wpnodes:
        return '@'
    if ctx.obsolete():
        return 'x'
    if ctx.closesbranch():
        return '_'
    return 'o'
450 455
@templatekeyword('index')
def showindex(**args):
    """Integer. The current iteration of the loop. (0 indexed)"""
    # just hosts documentation; should be overridden by template mapping
    msg = _("can't use index in this context")
    raise error.Abort(msg)
456 461
@templatekeyword('latesttag')
def showlatesttag(**args):
    """List of strings. The global tags on the most recent globally
    tagged ancestor of this changeset. If no such tags exist, the list
    consists of the single string "null".
    """
    # unfiltered variant of showlatesttags()
    return showlatesttags(None, **args)
464 469
def showlatesttags(pattern, **args):
    """helper method for the latesttag keyword and function"""
    repo, ctx = args['repo'], args['ctx']
    latesttags = getlatesttags(repo, ctx, args['cache'], pattern)

    # latesttag[0] is an implementation detail for sorting csets on different
    # branches in a stable manner- it is the date the tagged cset was created,
    # not the date the tag was created. Therefore it isn't made visible here.
    def makemap(v):
        return {
            'changes': _showchangessincetag,
            'distance': latesttags[1],
            'latesttag': v, # BC with {latesttag % '{latesttag}'}
            'tag': v
        }

    tags = latesttags[2]
    f = _showlist('latesttag', tags, args, separator=':')
    return _hybrid(f, tags, makemap, lambda x: x['latesttag'])
484 489
@templatekeyword('latesttagdistance')
def showlatesttagdistance(repo, ctx, templ, cache, **args):
    """Integer. Longest path to the latest tag."""
    latest = getlatesttags(repo, ctx, cache)
    return latest[1]
489 494
@templatekeyword('changessincelatesttag')
def showchangessincelatesttag(repo, ctx, templ, cache, **args):
    """Integer. All ancestors not in the latest tag."""
    tags = getlatesttags(repo, ctx, cache)[2]
    return _showchangessincetag(repo, ctx, tag=tags[0], **args)
496 501
497 502 def _showchangessincetag(repo, ctx, **args):
498 503 offset = 0
499 504 revs = [ctx.rev()]
500 505 tag = args['tag']
501 506
502 507 # The only() revset doesn't currently support wdir()
503 508 if ctx.rev() is None:
504 509 offset = 1
505 510 revs = [p.rev() for p in ctx.parents()]
506 511
507 512 return len(repo.revs('only(%ld, %s)', revs, tag)) + offset
508 513
@templatekeyword('manifest')
def showmanifest(**args):
    repo, ctx, templ = args['repo'], args['ctx'], args['templ']
    mnode = ctx.manifestnode()
    if mnode is None:
        # just avoid crash, we might want to use the 'ff...' hash in future
        return
    mapping = args.copy()
    mapping.update({'rev': repo.manifestlog._revlog.rev(mnode),
                    'node': hex(mnode)})
    return templ('manifest', **mapping)
520 525
def shownames(namespace, **args):
    """helper method to generate a template keyword for a namespace"""
    ctx = args['ctx']
    repo = ctx.repo()
    ns = repo.names[namespace]
    values = ns.names(repo, ctx.node())
    return showlist(ns.templatename, values, args, plural=namespace)
528 533
@templatekeyword('namespaces')
def shownamespaces(**args):
    """Dict of lists. Names attached to this changeset per
    namespace."""
    ctx = args['ctx']
    repo = ctx.repo()
    namespaces = util.sortdict()
    for k, ns in repo.names.iteritems():
        namespaces[k] = showlist('name', ns.names(repo, ctx.node()), args)
    f = _showlist('namespace', list(namespaces), args)
    def makemap(k):
        return {'namespace': k, 'names': namespaces[k]}
    return _hybrid(f, namespaces, makemap, lambda x: x['namespace'])
542 547
@templatekeyword('node')
def shownode(repo, ctx, templ, **args):
    """String. The changeset identification hash, as a 40 hexadecimal
    digit string.
    """
    hexnode = ctx.hex()
    return hexnode
549 554
@templatekeyword('obsolete')
def showobsolete(repo, ctx, templ, **args):
    """String. Whether the changeset is obsolete.
    """
    return 'obsolete' if ctx.obsolete() else ''
557 562
@templatekeyword('p1rev')
def showp1rev(repo, ctx, templ, **args):
    """Integer. The repository-local revision number of the changeset's
    first parent, or -1 if the changeset has no parents."""
    p1 = ctx.p1()
    return p1.rev()
563 568
@templatekeyword('p2rev')
def showp2rev(repo, ctx, templ, **args):
    """Integer. The repository-local revision number of the changeset's
    second parent, or -1 if the changeset has no second parent."""
    p2 = ctx.p2()
    return p2.rev()
569 574
@templatekeyword('p1node')
def showp1node(repo, ctx, templ, **args):
    """String. The identification hash of the changeset's first parent,
    as a 40 digit hexadecimal string. If the changeset has no parents, all
    digits are 0."""
    p1 = ctx.p1()
    return p1.hex()
576 581
@templatekeyword('p2node')
def showp2node(repo, ctx, templ, **args):
    """String. The identification hash of the changeset's second
    parent, as a 40 digit hexadecimal string. If the changeset has no second
    parent, all digits are 0."""
    p2 = ctx.p2()
    return p2.hex()
583 588
@templatekeyword('parents')
def showparents(**args):
    """List of strings. The parents of the changeset in "rev:node"
    format. If the changeset has only one "natural" parent (the predecessor
    revision) nothing is shown."""
    repo = args['repo']
    ctx = args['ctx']
    pctxs = scmutil.meaningfulparents(repo, ctx)
    prevs = [str(p.rev()) for p in pctxs] # ifcontains() needs a list of str
    parents = [[('rev', p.rev()),
                ('node', p.hex()),
                ('phase', p.phasestr())]
               for p in pctxs]
    f = _showlist('parent', parents, args)
    def makemap(x):
        return {'ctx': repo[int(x)], 'revcache': {}}
    return _hybrid(f, prevs, makemap, lambda d: _formatrevnode(d['ctx']))
600 605
@templatekeyword('phase')
def showphase(repo, ctx, templ, **args):
    """String. The changeset phase name."""
    phasename = ctx.phasestr()
    return phasename
605 610
@templatekeyword('phaseidx')
def showphaseidx(repo, ctx, templ, **args):
    """Integer. The changeset phase index."""
    idx = ctx.phase()
    return idx
610 615
@templatekeyword('rev')
def showrev(repo, ctx, templ, **args):
    """Integer. The repository-local changeset revision number."""
    rev = scmutil.intrev(ctx)
    return rev
615 620
def showrevslist(name, revs, **args):
    """helper to generate a list of revisions in which a mapped template will
    be evaluated"""
    repo = args['ctx'].repo()
    strrevs = [str(r) for r in revs] # ifcontains() needs a list of str
    f = _showlist(name, strrevs, args)
    def makemap(x):
        return {name: x, 'ctx': repo[int(x)], 'revcache': {}}
    return _hybrid(f, strrevs, makemap, lambda d: d[name])
625 630
@templatekeyword('subrepos')
def showsubrepos(**args):
    """List of strings. Updated subrepositories in the changeset."""
    ctx = args['ctx']
    substate = ctx.substate
    if not substate:
        return showlist('subrepo', [], args)
    psubstate = ctx.parents()[0].substate or {}
    # modified or newly added relative to the first parent
    changed = [s for s in substate
               if s not in psubstate or substate[s] != psubstate[s]]
    # removed relative to the first parent
    removed = [s for s in psubstate if s not in substate]
    return showlist('subrepo', sorted(changed + removed), args)
642 647
# don't remove "showtags" definition, even though namespaces will put
# a helper function for "tags" keyword into "keywords" map automatically,
# because online help text is built without namespaces initialization
@templatekeyword('tags')
def showtags(**args):
    """List of strings. Any tags associated with the changeset."""
    # per the note above, this definition mainly exists so the docstring
    # is available to the help machinery; it delegates to shownames()
    return shownames('tags', **args)
650 655
def loadkeyword(ui, extname, registrarobj):
    """Load template keyword from specified registrarobj
    """
    # registrarobj._table maps keyword names to implementations; merge
    # every entry into the module-level table in one step
    keywords.update(registrarobj._table)
656 661
@templatekeyword('termwidth')
def termwidth(repo, ctx, templ, **args):
    """Integer. The width of the current terminal."""
    ui = repo.ui
    return ui.termwidth()
661 666
@templatekeyword('troubles')
def showtroubles(**args):
    """List of strings. Evolution troubles affecting the changeset.

    (EXPERIMENTAL)
    """
    ctx = args['ctx']
    return showlist('trouble', ctx.troubles(), args)
669 674
# tell hggettext to extract docstrings from these functions:
# (the keyword docstrings double as the user-visible help text)
i18nfunctions = keywords.values()
General Comments 0
You need to be logged in to leave comments. Login now