##// END OF EJS Templates
templater: port formatnode filter from changeset_templater...
Yuya Nishihara -
r31169:48a8b2e5 default
parent child Browse files
Show More
@@ -1,3478 +1,3475
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import os
12 12 import re
13 13 import tempfile
14 14
15 15 from .i18n import _
16 16 from .node import (
17 17 bin,
18 18 hex,
19 19 nullid,
20 20 nullrev,
21 21 short,
22 22 )
23 23
24 24 from . import (
25 25 bookmarks,
26 26 changelog,
27 27 copies,
28 28 crecord as crecordmod,
29 29 encoding,
30 30 error,
31 31 formatter,
32 32 graphmod,
33 33 lock as lockmod,
34 34 match as matchmod,
35 35 obsolete,
36 36 patch,
37 37 pathutil,
38 38 phases,
39 39 pycompat,
40 40 repair,
41 41 revlog,
42 42 revset,
43 43 scmutil,
44 44 smartset,
45 45 templatekw,
46 46 templater,
47 47 util,
48 48 )
49 49 stringio = util.stringio
50 50
# special string such that everything below this line will be ignored in the
# editor text
53 53 _linebelow = "^HG: ------------------------ >8 ------------------------$"
54 54
def ishunk(x):
    """Report whether x is a record/crecord hunk object."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
58 58
def newandmodified(chunks, originalchunks):
    """Return the set of filenames whose hunks create new files and are
    not present in originalchunks (i.e. were introduced while recording)."""
    introduced = set()
    for c in chunks:
        if not ishunk(c):
            continue
        if c.header.isnewfile() and c not in originalchunks:
            introduced.add(c.header.filename())
    return introduced
66 66
def parsealiases(cmd):
    """Split a command-table key into its list of alias names.

    The leading '^' marker (flagging an important command for short help)
    is stripped before splitting on '|'.
    """
    stripped = cmd.lstrip("^")
    return stripped.split("|")
69 69
def setupwrapcolorwrite(ui):
    """Wrap ui.write so diff output can be labeled/colorized.

    Returns the original ui.write so the caller can restore it afterwards.
    """
    realwrite = ui.write

    def labeledwrite(*args, **kwargs):
        baselabel = kwargs.pop('label', '')
        for chunk, sublabel in patch.difflabel(lambda: args):
            realwrite(chunk, label=baselabel + sublabel)

    setattr(ui, 'write', labeledwrite)
    return realwrite
82 82
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Let the user select hunks: via curses when enabled, else via prompts."""
    if not usecurses:
        return patch.filterpatch(ui, originalhunks, operation)

    if testfile:
        # test mode: replay scripted selections from testfile
        chunkselector = crecordmod.testdecorator(testfile,
                                                 crecordmod.testchunkselector)
    else:
        chunkselector = crecordmod.chunkselector
    return crecordmod.filterpatch(ui, originalhunks, chunkselector, operation)
95 95
def recordfilter(ui, originalhunks, operation=None):
    """Prompt the user to filter originalhunks; return (chunks, newopts).

    *operation* is used to build ui messages indicating what kind of
    filtering the user is doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    curses = crecordmod.checkcurses(ui)
    testpath = ui.config('experimental', 'crecordtest', None)
    oldwrite = setupwrapcolorwrite(ui)
    try:
        return filterchunks(ui, originalhunks, curses, testpath, operation)
    finally:
        # always restore the unwrapped ui.write
        ui.write = oldwrite
112 112
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Interactively select changes, then delegate the commit to commitfunc.

    'cmdsuggest' is the command name suggested when the ui is not
    interactive; 'backupall' backs up every changed file instead of only
    the contended ones; 'filterfn' is the hunk-selection function
    (e.g. recordfilter).
    """
    from . import merge as mergemod
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        # diff options fixed for record: git-style, no dates, with function
        # context, so the generated patch round-trips through parsepatch
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # headers have files(); plain hunks do not
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                    newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            # write the selected hunks into an in-memory patch
            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        extra={"suffix": ".diff"},
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            # remove newly-added-and-modified files so the revert below and
            # the patch apply start from a clean state
            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best-effort cleanup; a failed restore must not mask the
                # original outcome
                pass

    def recordinwlock(ui, repo, message, match, opts):
        # take the working-directory lock around the actual record run
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
294 294
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    # an exact table key short-circuits prefix search: "log" beats
    # "^log|history"
    if cmd in table:
        candidates = [cmd]
    else:
        candidates = table.keys()

    allcmds = []
    normal = {}
    debug = {}
    for entry in candidates:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)
        if cmd in aliases:
            found = cmd
        else:
            found = None
            if not strict:
                for alias in aliases:
                    if alias.startswith(cmd):
                        found = alias
                        break
        if found is None:
            continue
        isdebug = aliases[0].startswith("debug") or found.startswith("debug")
        bucket = debug if isdebug else normal
        bucket[found] = (aliases, table[entry])

    # only fall back to debug commands when nothing normal matched
    if not normal and debug:
        normal = debug

    return normal, allcmds
332 332
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    # exact match wins outright
    if cmd in choice:
        return choice[cmd]

    nmatches = len(choice)
    if nmatches > 1:
        clist = choice.keys()
        clist.sort()
        raise error.AmbiguousCommand(cmd, clist)
    if nmatches:
        return choice.values()[0]

    raise error.UnknownCommand(cmd, allcmds)
349 349
def findrepo(p):
    """Walk upwards from p looking for a directory containing '.hg'.

    Returns the repository root, or None when the filesystem root is
    reached without finding one.
    """
    while True:
        if os.path.isdir(os.path.join(p, ".hg")):
            return p
        parent = os.path.dirname(p)
        if parent == p:
            # reached the filesystem root
            return None
        p = parent
357 357
def bailifchanged(repo, merge=True, hint=None):
    """enforce the precondition that working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """
    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    # modified, added, removed, deleted
    if any(repo.status()[:4]):
        raise error.Abort(_('uncommitted changes'), hint=hint)
    wctx = repo[None]
    for subname in sorted(wctx.substate):
        wctx.sub(subname).bailifchanged(hint=hint)
375 375
def logmessage(ui, opts):
    """get the log message according to -m and -l option"""
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if message or not logfile:
        return message
    # no -m given: read the message from the log file (or stdin for '-')
    try:
        if logfile == '-':
            return ui.fin.read()
        return '\n'.join(util.readfile(logfile).splitlines())
    except IOError as inst:
        raise error.Abort(_("can't read commit message '%s': %s") %
                          (logfile, inst.strerror))
394 394
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        ismerge = len(ctxorbool.parents()) > 1
    return baseformname + (".merge" if ismerge else ".normal")
411 411
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function called with the edited commit message
    (= 'description' of the new changeset) just after editing but before
    the empty-ness check; its return value is the text actually stored
    into history, allowing the description to be rewritten before storing.

    'extramsg' is an extra message shown in the editor instead of the
    'Leave message empty to abort commit' line; the 'HG: ' prefix and EOL
    are added automatically.

    'editform' is a dot-separated list of names distinguishing the
    purpose of commit text editing.

    'commitforceeditor' is returned regardless of 'edit' whenever
    'finishdesc' or 'extramsg' is specified, because they are specific
    for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        def forceeditor(r, c, s):
            return commitforceeditor(r, c, s, finishdesc=finishdesc,
                                     extramsg=extramsg, editform=editform)
        return forceeditor
    if editform:
        def formeditor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return formeditor
    return commiteditor
442 442
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if not limit:
        # no limit (or empty string) means unlimited
        return None
    try:
        limit = int(limit)
    except ValueError:
        raise error.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise error.Abort(_('limit must be positive'))
    return limit
456 456
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand the '%'-escapes in pat into a concrete file name.

    Expanders are lazy callables, so e.g. repo is only touched when the
    corresponding escape actually appears in pat. Unknown escapes abort.
    """
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }
    if node:
        expander.update({
            'H': lambda: hex(node),
            'R': lambda: str(repo.changelog.rev(node)),
            'h': lambda: short(node),
            'm': lambda: re.sub('[^\w]', '_', str(desc)),
            'r': lambda: str(repo.changelog.rev(node)).zfill(revwidth or 0),
        })
    if total is not None:
        expander['N'] = lambda: str(total)
    if seqno is not None:
        expander['n'] = lambda: str(seqno)
    if total is not None and seqno is not None:
        # both known: zero-pad the sequence number to the total's width
        expander['n'] = lambda: str(seqno).zfill(len(str(total)))
    if pathname is not None:
        expander['s'] = lambda: os.path.basename(pathname)
        expander['d'] = lambda: os.path.dirname(pathname) or '.'
        expander['p'] = lambda: pathname

    try:
        pieces = []
        i, end = 0, len(pat)
        while i < end:
            c = pat[i]
            if c == '%':
                i += 1
                c = expander[pat[i]]()
            pieces.append(c)
            i += 1
        return ''.join(pieces)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
502 502
503 503 class _unclosablefile(object):
504 504 def __init__(self, fp):
505 505 self._fp = fp
506 506
507 507 def close(self):
508 508 pass
509 509
510 510 def __iter__(self):
511 511 return iter(self._fp)
512 512
513 513 def __getattr__(self, attr):
514 514 return getattr(self._fp, attr)
515 515
516 516 def __enter__(self):
517 517 return self
518 518
519 519 def __exit__(self, exc_type, exc_value, exc_tb):
520 520 pass
521 521
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Return a file object for pat, expanding makefilename escapes.

    '-' (or an empty pat) maps to stdio, wrapped so callers cannot close
    it; a file-like pat is returned as-is when compatible with mode.
    """
    writable = mode not in ('r', 'rb')

    if not pat or pat == '-':
        return _unclosablefile(repo.ui.fout if writable else repo.ui.fin)
    # pat may already be a file-like object
    if util.safehasattr(pat, 'write') and writable:
        return pat
    if util.safehasattr(pat, 'read') and 'r' in mode:
        return pat
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # first write truncates; subsequent writes to fn append
            modemap[fn] = 'ab'
    return open(fn, mode)
544 544
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']

    # validate the option combination before touching anything
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                   "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if r:
        return r

    # fall back to opening the revlog index file directly
    if not file_:
        raise error.CommandError(cmd, _('invalid arguments'))
    if not os.path.isfile(file_):
        raise error.Abort(_("revlog '%s' not found") % file_)
    return revlog.revlog(scmutil.opener(pycompat.getcwd(), audit=False),
                         file_[:-2] + ".i")
589 589
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, move) files matching pats to a target.

    Returns True when at least one copy failed (so callers can set a
    non-zero exit status).
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # expand one source pattern into (abs, rel, exact) tuples,
        # warning about and skipping unmanaged/removed files
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(repo[None], [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # perform one copy/rename; returns True on failure, None otherwise
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                # case-only rename: handled specially below
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    if after:
                        flags = '--after --force'
                    else:
                        flags = '--force'
                    if rename:
                        hint = _('(hg rename %s to replace the file by '
                                 'recording a rename)\n') % flags
                    else:
                        hint = _('(hg copy %s to replace the file by '
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _('(hg rename --after to record the rename)\n')
                    else:
                        hint = _('(hg copy --after to record the copy)\n')
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # case-only rename: go through a temporary name so the
                    # rename works on case-insensitive filesystems
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                util.unlinkpath(repo.wjoin(abssrc))
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many stripped sources already exist in dest
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
835 835
## facility to let extensions process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' hooks are run before the commit is made and are provided the
# following arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' hooks are run after the commit is made and are provided the
# following argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
856 856
857 857 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
858 858 """Utility function used by commands.import to import a single patch
859 859
860 860 This function is explicitly defined here to help the evolve extension to
861 861 wrap this part of the import logic.
862 862
863 863 The API is currently a bit ugly because it a simple code translation from
864 864 the import command. Feel free to make it better.
865 865
866 866 :hunk: a patch (as a binary string)
867 867 :parents: nodes that will be parent of the created commit
868 868 :opts: the full dict of option passed to the import command
869 869 :msgs: list to save commit message to.
870 870 (used in case we need to save it when failing)
871 871 :updatefunc: a function that update a repo to a given node
872 872 updatefunc(<repo>, <node>)
873 873 """
874 874 # avoid cycle context -> subrepo -> cmdutil
875 875 from . import context
876 876 extractdata = patch.extract(ui, hunk)
877 877 tmpname = extractdata.get('filename')
878 878 message = extractdata.get('message')
879 879 user = opts.get('user') or extractdata.get('user')
880 880 date = opts.get('date') or extractdata.get('date')
881 881 branch = extractdata.get('branch')
882 882 nodeid = extractdata.get('nodeid')
883 883 p1 = extractdata.get('p1')
884 884 p2 = extractdata.get('p2')
885 885
886 886 nocommit = opts.get('no_commit')
887 887 importbranch = opts.get('import_branch')
888 888 update = not opts.get('bypass')
889 889 strip = opts["strip"]
890 890 prefix = opts["prefix"]
891 891 sim = float(opts.get('similarity') or 0)
892 892 if not tmpname:
893 893 return (None, None, False)
894 894
895 895 rejects = False
896 896
897 897 try:
898 898 cmdline_message = logmessage(ui, opts)
899 899 if cmdline_message:
900 900 # pickup the cmdline msg
901 901 message = cmdline_message
902 902 elif message:
903 903 # pickup the patch msg
904 904 message = message.strip()
905 905 else:
906 906 # launch the editor
907 907 message = None
908 908 ui.debug('message:\n%s\n' % message)
909 909
910 910 if len(parents) == 1:
911 911 parents.append(repo[nullid])
912 912 if opts.get('exact'):
913 913 if not nodeid or not p1:
914 914 raise error.Abort(_('not a Mercurial patch'))
915 915 p1 = repo[p1]
916 916 p2 = repo[p2 or nullid]
917 917 elif p2:
918 918 try:
919 919 p1 = repo[p1]
920 920 p2 = repo[p2]
921 921 # Without any options, consider p2 only if the
922 922 # patch is being applied on top of the recorded
923 923 # first parent.
924 924 if p1 != parents[0]:
925 925 p1 = parents[0]
926 926 p2 = repo[nullid]
927 927 except error.RepoError:
928 928 p1, p2 = parents
929 929 if p2.node() == nullid:
930 930 ui.warn(_("warning: import the patch as a normal revision\n"
931 931 "(use --exact to import the patch as a merge)\n"))
932 932 else:
933 933 p1, p2 = parents
934 934
935 935 n = None
936 936 if update:
937 937 if p1 != parents[0]:
938 938 updatefunc(repo, p1.node())
939 939 if p2 != parents[1]:
940 940 repo.setparents(p1.node(), p2.node())
941 941
942 942 if opts.get('exact') or importbranch:
943 943 repo.dirstate.setbranch(branch or 'default')
944 944
945 945 partial = opts.get('partial', False)
946 946 files = set()
947 947 try:
948 948 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
949 949 files=files, eolmode=None, similarity=sim / 100.0)
950 950 except patch.PatchError as e:
951 951 if not partial:
952 952 raise error.Abort(str(e))
953 953 if partial:
954 954 rejects = True
955 955
956 956 files = list(files)
957 957 if nocommit:
958 958 if message:
959 959 msgs.append(message)
960 960 else:
961 961 if opts.get('exact') or p2:
962 962 # If you got here, you either use --force and know what
963 963 # you are doing or used --exact or a merge patch while
964 964 # being updated to its first parent.
965 965 m = None
966 966 else:
967 967 m = scmutil.matchfiles(repo, files or [])
968 968 editform = mergeeditform(repo[None], 'import.normal')
969 969 if opts.get('exact'):
970 970 editor = None
971 971 else:
972 972 editor = getcommiteditor(editform=editform, **opts)
973 973 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
974 974 extra = {}
975 975 for idfunc in extrapreimport:
976 976 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
977 977 try:
978 978 if partial:
979 979 repo.ui.setconfig('ui', 'allowemptycommit', True)
980 980 n = repo.commit(message, user,
981 981 date, match=m,
982 982 editor=editor, extra=extra)
983 983 for idfunc in extrapostimport:
984 984 extrapostimportmap[idfunc](repo[n])
985 985 finally:
986 986 repo.ui.restoreconfig(allowemptyback)
987 987 else:
988 988 if opts.get('exact') or importbranch:
989 989 branch = branch or 'default'
990 990 else:
991 991 branch = p1.branch()
992 992 store = patch.filestore()
993 993 try:
994 994 files = set()
995 995 try:
996 996 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
997 997 files, eolmode=None)
998 998 except patch.PatchError as e:
999 999 raise error.Abort(str(e))
1000 1000 if opts.get('exact'):
1001 1001 editor = None
1002 1002 else:
1003 1003 editor = getcommiteditor(editform='import.bypass')
1004 1004 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1005 1005 message,
1006 1006 user,
1007 1007 date,
1008 1008 branch, files, store,
1009 1009 editor=editor)
1010 1010 n = memctx.commit()
1011 1011 finally:
1012 1012 store.close()
1013 1013 if opts.get('exact') and nocommit:
1014 1014 # --exact with --no-commit is still useful in that it does merge
1015 1015 # and branch bits
1016 1016 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1017 1017 elif opts.get('exact') and hex(n) != nodeid:
1018 1018 raise error.Abort(_('patch is damaged or loses information'))
1019 1019 msg = _('applied to working directory')
1020 1020 if n:
1021 1021 # i18n: refers to a short changeset id
1022 1022 msg = _('created %s') % short(n)
1023 1023 return (msg, n, rejects)
1024 1024 finally:
1025 1025 os.unlink(tmpname)
1026 1026
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# each function has to return a string to be added to the header or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1034 1034
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches.

    For each revision in revs, emit a "# HG changeset patch" header
    followed by the diff against its first parent (or second parent
    when switch_parent is set).  Output goes to fp if given, otherwise
    to a file named by expanding template (empty template means
    ui.write).  match restricts the files included in each diff.
    '''

    total = len(revs)
    revwidth = max([len(str(rev)) for rev in revs])
    # per-filename mode map shared across all makefileobj calls
    filemode = {}

    def single(rev, seqno, fp):
        # emit the header and diff for one revision; seqno is 1-based
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()

        if parents:
            prev = parents[0]
        else:
            prev = nullid

        shouldclose = False
        if not fp and len(template) > 0:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fp = makefileobj(repo, template, node, desc=desc, total=total,
                             seqno=seqno, revwidth=revwidth, mode='wb',
                             modemap=filemode)
            # we opened the file ourselves, so we must close it below
            shouldclose = True
        if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
            repo.ui.note("%s\n" % fp.name)

        if not fp:
            write = repo.ui.write
        else:
            def write(s, **kw):
                fp.write(s)

        write("# HG changeset patch\n")
        write("# User %s\n" % ctx.user())
        write("# Date %d %d\n" % ctx.date())
        write("# %s\n" % util.datestr(ctx.date()))
        if branch and branch != 'default':
            write("# Branch %s\n" % branch)
        write("# Node ID %s\n" % hex(node))
        write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            write("# Parent %s\n" % hex(parents[1]))

        # let extensions add their own header lines (see extraexport)
        for headerid in extraexport:
            header = extraexportmap[headerid](seqno, ctx)
            if header is not None:
                write('# %s\n' % header)
        write(ctx.description().rstrip())
        write("\n\n")

        for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
            write(chunk, label=label)

        if shouldclose:
            fp.close()

    for seqno, rev in enumerate(revs):
        single(rev, seqno + 1, fp)
1099 1099
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False):
    '''show diff or diffstat.

    Writes the diff between node1 and node2 (a diffstat summary when
    stat is true) to fp, or to ui when fp is None.  root, if given,
    makes paths relative to that directory; listsubrepos recurses into
    subrepositories whose state changed between the two nodes.
    '''
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            fp.write(s)

    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        # diffstat does not need any context lines
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix, relroot=relroot)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix,
                                         relroot=relroot):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.subdirmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1156 1156
def _changesetlabels(ctx):
    """Return the space-separated output labels for a changeset line.

    Always includes 'log.changeset' and a phase label; troubled
    changesets additionally get 'changeset.troubled' plus one
    'trouble.<name>' label per trouble.
    """
    parts = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
    if ctx.troubled():
        parts.append('changeset.troubled')
        parts.extend('trouble.%s' % trouble for trouble in ctx.troubles())
    return ' '.join(parts)
1164 1164
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        # when buffered, output is collected per rev and emitted by flush()
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        # rev -> buffered header / hunk text
        self.header = {}
        self.hunk = {}
        self.lastheader = None
        self.footer = None

    def flush(self, ctx):
        """Write buffered output for ctx; return 1 if a hunk was written."""
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            # only emit a header when it differs from the previous one
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        """Write any accumulated footer."""
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        """Show one changeset, buffering the output when requested."""
        if self.buffered:
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()
        if self.ui.debugflag:
            hexfunc = hex
        else:
            hexfunc = short
        # as of now, wctx.node() and wctx.rev() return None, but we want to
        # show the same values as {node} and {rev} templatekw
        revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % revnode, label='log.node')
            return

        date = util.datestr(ctx.date())

        # i18n: column positioning for "hg log"
        self.ui.write(_("changeset: %d:%s\n") % revnode,
                      label=_changesetlabels(ctx))

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            # i18n: column positioning for "hg log"
            self.ui.write(_("branch: %s\n") % branch,
                          label='log.branch')

        for nsname, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if nsname == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            # i18n: column positioning for "hg log"
            self.ui.write(_("phase: %s\n") % ctx.phasestr(),
                          label='log.phase')
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            # i18n: column positioning for "hg log"
            self.ui.write(_("parent: %d:%s\n")
                          % (pctx.rev(), hexfunc(pctx.node())),
                          label=label)

        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            # i18n: column positioning for "hg log"
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifestlog._revlog.rev(mnode),
                           hex(mnode)),
                          label='ui.debug log.manifest')
        # i18n: column positioning for "hg log"
        self.ui.write(_("user: %s\n") % ctx.user(),
                      label='log.user')
        # i18n: column positioning for "hg log"
        self.ui.write(_("date: %s\n") % date,
                      label='log.date')

        if ctx.troubled():
            # i18n: column positioning for "hg log"
            self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
                          label='log.trouble')

        if self.ui.debugflag:
            # modified, added, removed file lists
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip([# i18n: column positioning for "hg log"
                                   _("files:"),
                                   # i18n: column positioning for "hg log"
                                   _("files+:"),
                                   # i18n: column positioning for "hg log"
                                   _("files-:")], files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            # i18n: column positioning for "hg log"
            self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            # i18n: column positioning for "hg log"
            self.ui.write(_("copies: %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                # i18n: column positioning for "hg log"
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # i18n: column positioning for "hg log"
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(ctx, matchfn)

    def showpatch(self, ctx, matchfn):
        """Write the diffstat and/or diff of ctx per self.diffopts."""
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            node = ctx.node()
            prev = ctx.p1().node()
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                if stat:
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write("\n")
1340 1340
class jsonchangeset(changeset_printer):
    '''format changeset information as a JSON array of objects.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        # tracks whether the opening "[" has been written yet
        self._first = True

    def close(self):
        """Terminate the JSON document ("[]" when nothing was shown)."""
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        # the working directory has no rev/node; emit JSON null for both
        if rev is None:
            jrev = jnode = 'null'
        else:
            jrev = str(rev)
            jnode = '"%s"' % hex(ctx.node())
        j = encoding.jsonescape

        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            self.ui.write(",\n {")

        if self.ui.quiet:
            self.ui.write(('\n "rev": %s') % jrev)
            self.ui.write((',\n "node": %s') % jnode)
            self.ui.write('\n }')
            return

        self.ui.write(('\n "rev": %s') % jrev)
        self.ui.write((',\n "node": %s') % jnode)
        self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
        self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
        self.ui.write((',\n "user": "%s"') % j(ctx.user()))
        self.ui.write((',\n "date": [%d, %d]') % ctx.date())
        self.ui.write((',\n "desc": "%s"') % j(ctx.description()))

        self.ui.write((',\n "bookmarks": [%s]') %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write((',\n "tags": [%s]') %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write((',\n "parents": [%s]') %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            self.ui.write((',\n "manifest": %s') % jmanifestnode)

            self.ui.write((',\n "extra": {%s}') %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            # modified / added / removed relative to the first parent
            files = ctx.p1().status(ctx)
            self.ui.write((',\n "modified": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write((',\n "added": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write((',\n "removed": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            self.ui.write((',\n "files": [%s]') %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                self.ui.write((',\n "copies": {%s}') %
                              ", ".join('"%s": "%s"' % (j(k), j(v))
                                        for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                # capture diffordiffstat output so it can be JSON-escaped
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write((',\n "diffstat": "%s"')
                              % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))

        self.ui.write("\n }")
1439 1439
class changeset_templater(changeset_printer):
    '''format changeset information with a user-supplied template.

    Exactly one of tmpl (an inline template string) and mapfile (a
    style map file) must be given; the templates for the current
    verbosity mode are selected at construction time.
    '''

    def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        # NOTE: this span contained leftover diff residue - both the old
        # lines passing a local 'formatnode' filter and their replacements.
        # Resolved to the new revision: the formatnode filter now lives in
        # the templater module, so no filters are passed here.
        defaulttempl = {
            'parent': '{rev}:{node|formatnode} ',
            'manifest': '{rev}:{node|formatnode}',
            'file_copy': '{name} ({source})',
            'envvar': '{key}={value}',
            'extra': '{key}={value|stringescape}'
            }
        # filecopy is preserved for compatibility reasons
        defaulttempl['filecopy'] = defaulttempl['file_copy']
        assert not (tmpl and mapfile)
        if mapfile:
            self.t = templater.templater.frommapfile(mapfile,
                                                     cache=defaulttempl)
        else:
            self.t = formatter.maketemplater(ui, 'changeset', tmpl,
                                             cache=defaulttempl)

        self.cache = {}

        # find correct templates for current mode
        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        # map logical part name -> template name to render; a mode-specific
        # "<part>_<mode>" template overrides the plain one when present
        self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
                       'docheader': '', 'docfooter': ''}
        for mode, postfix in tmplmodes:
            for t in self._parts:
                cur = t
                if postfix:
                    cur += "_" + postfix
                if mode and cur in self.t:
                    self._parts[t] = cur

        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        """Append the document footer before the base-class close."""
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        props = props.copy()
        props.update(templatekw.keywords)
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['ui'] = self.repo.ui
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache

        # write header
        if self._parts['header']:
            h = templater.stringify(self.t(self._parts['header'], **props))
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                # repeated identical headers are collapsed
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts['changeset']
        self.ui.write(templater.stringify(self.t(key, **props)))
        self.showpatch(ctx, matchfn)

        if self._parts['footer']:
            if not self.footer:
                self.footer = templater.stringify(
                    self.t(self._parts['footer'], **props))
1525 1522
def gettemplate(ui, tmpl, style):
    """Resolve a template spec or style name to a (tmpl, mapfile) pair.

    Falls back to the [ui] logtemplate/style configuration when
    neither argument is given; either element of the result may be
    None.
    """

    # explicit template/style arguments win over the ui configuration
    if not tmpl and not style:
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            return templater.unquotestring(tmpl), None
        style = util.expandpath(ui.config('ui', 'style', ''))

    if not tmpl and style:
        mapfile = style
        # a bare name refers to a shipped map-cmdline.<name> style file
        if not os.path.split(mapfile)[0]:
            found = (templater.templatepath('map-cmdline.' + mapfile)
                     or templater.templatepath(mapfile))
            if found:
                mapfile = found
        return None, mapfile

    if not tmpl:
        return None, None

    return formatter.lookuptemplate(ui, 'changeset', tmpl)
1552 1549
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # a match function is only needed when a diff/diffstat is requested
    matchfn = scmutil.matchall(repo) if (opts.get('patch') or
                                         opts.get('stat')) else None

    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, matchfn, opts, buffered)

    tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))

    if tmpl or mapfile:
        return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
                                   buffered)
    return changeset_printer(ui, repo, matchfn, opts, buffered)
1578 1575
def showmarker(fm, marker, index=None):
    """Render one obsolescence marker through formatter fm.

    To be used by debug functions; index, when given, is an ordinal
    written in front of the marker."""
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('precnode', '%s ', hex(marker.precnode()))
    successors = marker.succnodes()
    fm.condwrite(successors, 'succnodes', '%s ',
                 fm.formatlist(map(hex, successors), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parentset = marker.parentnodes()
    if parentset is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parentset), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    metadata = marker.metadata().copy()
    # the date is rendered separately above, so drop it from the dict
    metadata.pop('date', None)
    fm.write('metadata', '{%s}',
             fm.formatdict(metadata, fmt='%r: %r', sep=', '))
    fm.plain('\n')
1599 1596
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    datematch = util.matchdate(date)
    m = scmutil.matchall(repo)
    found = {}

    def prep(ctx, fns):
        # record every revision whose commit time satisfies the spec
        when = ctx.date()
        if datematch(when[0]):
            found[ctx.rev()] = when

    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in found:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(found[rev])))
            return str(rev)

    raise error.Abort(_("revision matching date not found"))
1620 1617
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes forever, doubling until sizelimit is reached."""
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size *= 2
1626 1623
class FileWalkError(Exception):
    """Raised by walkfilerevs() when history cannot be walked via filelogs."""
    pass
1629 1626
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.

    As a side effect, fncache is filled with rev -> [filenames]
    entries for the revisions found.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yield (filename, filenode-or-None) for every file to examine;
        # rename sources accumulated in `copies` are walked as well
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = set([filelog.linkrev(last)])

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
            # XXX insert 1327 fix here
            if flparentlinkrevs:
                ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
1726 1723
class _followfilter(object):
    """Stateful predicate selecting revisions connected to the first
    revision fed to match(): descendants when revisions arrive in
    increasing order, ancestors when they arrive in decreasing order.
    With onlyfirst, only first parents are considered."""

    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            return [p for p in self.repo.changelog.parentrevs(rev)
                    if p != nullrev]

        # the first revision seen anchors the walk
        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: rev matches when a parent is a known descendant
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
            return False

        # backwards: rev matches when it is a known ancestor; its own
        # parents become the next expected ancestors
        if not self.roots:
            self.roots.update(realparents(self.startrev))
        if rev in self.roots:
            self.roots.remove(rev)
            self.roots.update(realparents(rev))
            return True

        return False
1764 1761
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.

    'match' is a matcher selecting the files of interest, 'opts' is the
    log-style options dict ('follow', 'follow_first', 'rev', 'removed',
    'prune', ...), and 'prepare(ctx, fns)' is invoked with each context
    and an iterable of its matched file names before the context is
    yielded.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    # Patterns, or exact/prefix matches combined with --removed, force the
    # slow path (scanning changelog entries instead of filelogs).
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    # Bind the changectx factory locally; it is called for every revision.
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    # Each candidate revision is examined at most once; its
                    # matched files are memoized into fncache for display.
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                # NOTE(review): this relies on 'wanted' supporting
                # subtraction by a list (smartset.__sub__ does); a plain
                # set() or lazywantedset here would raise TypeError —
                # confirm which types can reach this branch with --prune.
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            # Following without file arguments: filter by ancestry of '.'.
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # First pass, forward order: let 'prepare' gather data.
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    # Not cached: lazily re-derive matched files on demand.
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # Second pass, requested order: yield the contexts.
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
1902 1899
def _makefollowlogfilematcher(repo, files, followfirst):
    """Return a callable rev -> matcher for --patch/--stat with --follow.

    When displaying a revision with --patch --follow FILE we must know
    which file of that revision to diff.  The rev -> file-name mapping is
    rebuilt by redoing the graph traversal the --follow revset already
    performed and relating revisions to file names — not strictly
    "correct", but good enough for display purposes.
    """
    revfiles = {}      # rev -> set of paths the followed files had there
    populated = []     # non-empty once the cache is filled (py2 closure hack)
    wdirparent = repo['.']

    def _fill():
        # Walk each requested file back through its ancestors, recording
        # under every revision the path the file carried there.
        for name in files:
            fctx = wdirparent[name]
            revfiles.setdefault(fctx.introrev(), set()).add(fctx.path())
            for anc in fctx.ancestors(followfirst=followfirst):
                revfiles.setdefault(anc.rev(), set()).add(anc.path())

    def filematcher(rev):
        # Populate lazily: callers may never request any revision.
        if not populated:
            populated.append(True)
            _fill()
        return scmutil.matchfiles(repo, revfiles.get(rev, []))

    return filematcher
1930 1927
1931 1928 def _makenofollowlogfilematcher(repo, pats, opts):
1932 1929 '''hook for extensions to override the filematcher for non-follow cases'''
1933 1930 return None
1934 1931
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # Maps a log option name to (revset template, joiner for list values).
    # A None joiner means the option never carries a list value.
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    # Copy: synthetic '_'-prefixed pseudo-options are inserted below.
    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    # True when the second revision is greater than the first, i.e. revs
    # are ascending and --follow should walk descendants of startrev.
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

        # We decided to fall back to the slowpath because at least one
        # of the paths was not a file. Check to see if at least one of them
        # existed in history - in that case, we'll continue down the
        # slowpath; otherwise, we can turn off the slowpath
        if slowpath:
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                slowpath = False

    # Pseudo-option names, indexed by followfirst (and, for the second
    # tuple, by whether descendants rather than ancestors are followed).
    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    # Translate each populated option into a revset fragment, ANDing the
    # fragments together (list values are joined with the option's joiner).
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2086 2083
def _logrevs(repo, opts):
    """Resolve the revisions a log-like command should visit.

    The default --rev value depends on --follow, while --follow behavior
    in turn depends on the revisions resolved from --rev...
    """
    following = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        # An explicit --rev wins, kept in the user-specified order.
        return scmutil.revrange(repo, opts['rev'])
    if following:
        if repo.dirstate.p1() == nullid:
            # Following from the null revision: nothing to walk.
            return smartset.baseset()
        return repo.revs('reverse(:.)')
    # Neither --rev nor --follow: every revision, newest first.
    allrevs = smartset.spanset(repo)
    allrevs.reverse()
    return allrevs
2101 2098
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) for graph log display.

    revs is an iterable of revision numbers; expr is a revset string
    built from log options and file patterns (or None) and used to
    filter 'revs'. filematcher is None unless --stat or --patch was
    passed, in which case it is a callable taking a revision number and
    returning a match object selecting the files to detail for that
    revision.
    """
    maxcount = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev') and not (revs.isdescending() or revs.istopo()):
        # User-specified revs might be unsorted; sorting must wait until
        # after _makelogrevset, which may depend on the original order.
        revs.sort(reverse=True)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if maxcount is not None:
        # Truncate to the first 'maxcount' revisions (--limit).
        truncated = []
        for seen, rev in enumerate(revs):
            if seen >= maxcount:
                break
            truncated.append(rev)
        revs = smartset.baseset(truncated)
    return revs, expr, filematcher
2132 2129
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        # Consistent with getgraphlogrevs: empty baseset, no expr/matcher.
        return smartset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        # Keep only the first 'limit' revisions (--limit), mirroring the
        # identical truncation loop in getgraphlogrevs.
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = smartset.baseset(limitedrevs)

    return revs, expr, filematcher
2158 2155
def _graphnodeformatter(ui, displayer):
    """Return a callable (repo, ctx) -> str rendering the graph node glyph.

    When ui.graphnodetemplate is unset, the stock {graphnode} keyword is
    returned directly, skipping the template machinery entirely.
    """
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        # fast path for "{graphnode}"
        return templatekw.showgraphnode

    templ = formatter.gettemplater(ui, 'graphnode', spec)
    if isinstance(displayer, changeset_templater):
        # reuse the displayer's cache of slow templates
        cache = displayer.cache
    else:
        cache = {}
    props = templatekw.keywords.copy()
    props.update({'templ': templ, 'cache': cache})

    def formatnode(repo, ctx):
        # Per-call keywords: the context being drawn plus a fresh revcache.
        props.update({'ctx': ctx, 'repo': repo, 'ui': repo.ui,
                      'revcache': {}})
        return templater.stringify(templ('graphnode', **props))

    return formatnode
2178 2175
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None):
    """Render the revisions of 'dag' as an ASCII graph through 'displayer'.

    'dag' yields (rev, type, ctx, parents) tuples; 'edgefn' turns each
    rendered changeset into graph edges. 'getrenamed', when given, is a
    callable (fn, rev) used to report copies, and 'filematcher', when
    given, maps a rev to the match object limiting --patch/--stat output.
    """
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            # An empty style string disables that edge type entirely.
            if not styles[key]:
                styles[key] = None

        # experimental config: experimental.graphshorten
        state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        # ctx.rev() is falsy for the working directory; skip copies there.
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        # The displayer is buffered: grab its output lines for this rev
        # so they can be laid out next to the graph columns.
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        lines = displayer.hunk.pop(rev).split('\n')
        # Drop the trailing empty line produced by the final '\n'.
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        edges = edgefn(type, char, lines, state, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()
2226 2223
def graphlog(ui, repo, *pats, **opts):
    """Show revision history alongside an ASCII revision graph.

    Parameters are identical to those of the log command.
    """
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    # Rename information is only computed when --copies was requested.
    getrenamed = None
    if opts.get('copies'):
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        else:
            endrev = None
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges,
                 getrenamed, filematcher)
2243 2240
def checkunsupportedgraphflags(pats, opts):
    """Abort if an option incompatible with -G/--graph was supplied."""
    for name in ["newest_first"]:
        if opts.get(name):
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % name.replace("_", "-"))
2249 2246
def graphrevs(repo, nodes, opts):
    """Turn a list of nodes into a graph walk, newest first.

    'nodes' is reversed in place; when --limit is set, only the first
    'limit' entries after reversal are kept.
    """
    maxcount = loglimit(opts)
    nodes.reverse()
    if maxcount is not None:
        nodes = nodes[:maxcount]
    return graphmod.nodes(repo, nodes)
2256 2253
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule files matched by 'match' for addition, recursing into subrepos.

    'prefix' is prepended to names in messages and passed down to subrepo
    adds; with 'explicitonly' set, only exactly-named files are added.
    Returns the list of files that could not be added.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []

    # Collect names rejected by the matcher while still reporting them
    # through the original matcher's bad() callback.
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
                                  True, False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                # Check for filename case collisions on case-insensitive
                # filesystems (may abort or warn depending on config).
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            # Without --subrepos, subrepo adds are restricted to files
            # named explicitly (explicitonly=True).
            if opts.get('subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        # Only surface rejections for files the user asked about.
        bad.extend(f for f in rejected if f in match.files())
    return bad
2299 2296
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking files matched by 'match', recursing into subrepos.

    With 'explicitonly' set, only exactly-named files are forgotten.
    Returns (bad, forgot): files that could not be forgotten and files
    that were.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # Record names rejected by the matcher while still invoking the
    # original matcher's bad() callback.
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    # Forget candidates: modified, added, deleted and clean files
    # (status tuple indices 0, 1, 3 and 6 respectively).
    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s[0] + s[1] + s[3] + s[6])
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # Warn about explicitly-named files that are not tracked at all.
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    # Only surface rejections for files the user asked about.
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2347 2344
def files(ui, ctx, m, fm, fmt, subrepos):
    """List files of 'ctx' matched by 'm' through formatter 'fm'.

    'fmt' is the per-path output format; with 'subrepos' set, matching
    subrepositories are recursed into. Returns 0 if anything was listed,
    1 otherwise.
    """
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        # In the working directory (rev is None), skip files marked
        # removed ('r') in the dirstate.
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        # Recurse when asked globally, when the subrepo was named exactly,
        # or when a pattern reaches inside it.
        if (subrepos or m.exact(subpath) or any(submatch.files())):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret
2377 2374
def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    """Schedule files matched by 'm' for removal, recursing into subrepos.

    'after' records already-deleted files instead of unlinking; 'force'
    removes even modified/added files. When 'warnings' is supplied, the
    caller owns printing them; otherwise they are printed here. Returns
    0 on full success, 1 if anything was warned about or skipped.
    """
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    # Top-level call: collect warnings here and print them at the end.
    # Recursive (subrepo) calls pass a shared list and print nothing.
    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                               % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        # True when f lives inside one of this repo's subrepositories.
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1
    ui.progress(_('deleting'), None)

    # NOTE(review): 'list' shadows the builtin for the rest of this
    # function; kept as-is to preserve the existing code.
    if force:
        list = modified + deleted + clean + added
    elif after:
        # --after: only record files already gone; warn about the rest.
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file still exists\n')
                            % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)
    else:
        # Default: refuse to remove modified or freshly-added files.
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
2495 2492
def cat(ui, repo, ctx, matcher, prefix, **opts):
    """Write the data of files in 'ctx' matched by 'matcher'.

    Output destination and filtering come from 'opts' ('output',
    'decode'); 'prefix' is prepended to paths for subrepo recursion.
    Returns 0 if at least one file was written, 1 otherwise.
    """
    err = 1

    def write(path):
        # 'output' may contain a format pattern; makefileobj resolves it
        # (or stdout) for this changeset and path.
        fp = makefileobj(repo, opts.get('output'), ctx.node(),
                         pathname=os.path.join(prefix, path))
        data = ctx[path].data()
        if opts.get('decode'):
            data = repo.wwritedata(path, data)
        fp.write(data)
        fp.close()

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                write(file)
                return 0
        except KeyError:
            # Fall through to the generic walk below.
            pass

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, os.path.join(prefix, sub._path),
                           **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2538 2535
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get('date')
    if rawdate:
        # Normalize the user-supplied date in place for commitfunc.
        opts['date'] = util.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # Look up addremove carefully -- this function can be called from a
    # command that doesn't support addremove.
    if opts.get('addremove') and scmutil.addremove(repo, matcher, "",
                                                   opts) != 0:
        raise error.Abort(
            _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)
2555 2552
def samefile(f, ctx1, ctx2):
    """Report whether file 'f' is identical (content and flags) in two
    contexts.

    A file absent from both contexts counts as "same"; a file present in
    only one of them counts as different.
    """
    in1 = f in ctx1.manifest()
    in2 = f in ctx2.manifest()
    if not in1:
        # Same only if the other side lacks it too.
        return not in2
    if not in2:
        return False
    a = ctx1.filectx(f)
    b = ctx2.filectx(f)
    return not a.cmp(b) and a.flags() == b.flags()
2567 2564
2568 2565 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2569 2566 # avoid cycle context -> subrepo -> cmdutil
2570 2567 from . import context
2571 2568
2572 2569 # amend will reuse the existing user if not specified, but the obsolete
2573 2570 # marker creation requires that the current user's name is specified.
2574 2571 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2575 2572 ui.username() # raise exception if username not set
2576 2573
2577 2574 ui.note(_('amending changeset %s\n') % old)
2578 2575 base = old.p1()
2579 2576 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2580 2577
2581 2578 wlock = lock = newid = None
2582 2579 try:
2583 2580 wlock = repo.wlock()
2584 2581 lock = repo.lock()
2585 2582 with repo.transaction('amend') as tr:
2586 2583 # See if we got a message from -m or -l, if not, open the editor
2587 2584 # with the message of the changeset to amend
2588 2585 message = logmessage(ui, opts)
2589 2586 # ensure logfile does not conflict with later enforcement of the
2590 2587 # message. potential logfile content has been processed by
2591 2588 # `logmessage` anyway.
2592 2589 opts.pop('logfile')
2593 2590 # First, do a regular commit to record all changes in the working
2594 2591 # directory (if there are any)
2595 2592 ui.callhooks = False
2596 2593 activebookmark = repo._bookmarks.active
2597 2594 try:
2598 2595 repo._bookmarks.active = None
2599 2596 opts['message'] = 'temporary amend commit for %s' % old
2600 2597 node = commit(ui, repo, commitfunc, pats, opts)
2601 2598 finally:
2602 2599 repo._bookmarks.active = activebookmark
2603 2600 repo._bookmarks.recordchange(tr)
2604 2601 ui.callhooks = True
2605 2602 ctx = repo[node]
2606 2603
2607 2604 # Participating changesets:
2608 2605 #
2609 2606 # node/ctx o - new (intermediate) commit that contains changes
2610 2607 # | from working dir to go into amending commit
2611 2608 # | (or a workingctx if there were no changes)
2612 2609 # |
2613 2610 # old o - changeset to amend
2614 2611 # |
2615 2612 # base o - parent of amending changeset
2616 2613
2617 2614 # Update extra dict from amended commit (e.g. to preserve graft
2618 2615 # source)
2619 2616 extra.update(old.extra())
2620 2617
2621 2618 # Also update it from the intermediate commit or from the wctx
2622 2619 extra.update(ctx.extra())
2623 2620
2624 2621 if len(old.parents()) > 1:
2625 2622 # ctx.files() isn't reliable for merges, so fall back to the
2626 2623 # slower repo.status() method
2627 2624 files = set([fn for st in repo.status(base, old)[:3]
2628 2625 for fn in st])
2629 2626 else:
2630 2627 files = set(old.files())
2631 2628
2632 2629 # Second, we use either the commit we just did, or if there were no
2633 2630 # changes the parent of the working directory as the version of the
2634 2631 # files in the final amend commit
2635 2632 if node:
2636 2633 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2637 2634
2638 2635 user = ctx.user()
2639 2636 date = ctx.date()
2640 2637 # Recompute copies (avoid recording a -> b -> a)
2641 2638 copied = copies.pathcopies(base, ctx)
2642 2639 if old.p2:
2643 2640 copied.update(copies.pathcopies(old.p2(), ctx))
2644 2641
2645 2642 # Prune files which were reverted by the updates: if old
2646 2643 # introduced file X and our intermediate commit, node,
2647 2644 # renamed that file, then those two files are the same and
2648 2645 # we can discard X from our list of files. Likewise if X
2649 2646 # was deleted, it's no longer relevant
2650 2647 files.update(ctx.files())
2651 2648 files = [f for f in files if not samefile(f, ctx, base)]
2652 2649
2653 2650 def filectxfn(repo, ctx_, path):
2654 2651 try:
2655 2652 fctx = ctx[path]
2656 2653 flags = fctx.flags()
2657 2654 mctx = context.memfilectx(repo,
2658 2655 fctx.path(), fctx.data(),
2659 2656 islink='l' in flags,
2660 2657 isexec='x' in flags,
2661 2658 copied=copied.get(path))
2662 2659 return mctx
2663 2660 except KeyError:
2664 2661 return None
2665 2662 else:
2666 2663 ui.note(_('copying changeset %s to %s\n') % (old, base))
2667 2664
2668 2665 # Use version of files as in the old cset
2669 2666 def filectxfn(repo, ctx_, path):
2670 2667 try:
2671 2668 return old.filectx(path)
2672 2669 except KeyError:
2673 2670 return None
2674 2671
2675 2672 user = opts.get('user') or old.user()
2676 2673 date = opts.get('date') or old.date()
2677 2674 editform = mergeeditform(old, 'commit.amend')
2678 2675 editor = getcommiteditor(editform=editform, **opts)
2679 2676 if not message:
2680 2677 editor = getcommiteditor(edit=True, editform=editform)
2681 2678 message = old.description()
2682 2679
2683 2680 pureextra = extra.copy()
2684 2681 extra['amend_source'] = old.hex()
2685 2682
2686 2683 new = context.memctx(repo,
2687 2684 parents=[base.node(), old.p2().node()],
2688 2685 text=message,
2689 2686 files=files,
2690 2687 filectxfn=filectxfn,
2691 2688 user=user,
2692 2689 date=date,
2693 2690 extra=extra,
2694 2691 editor=editor)
2695 2692
2696 2693 newdesc = changelog.stripdesc(new.description())
2697 2694 if ((not node)
2698 2695 and newdesc == old.description()
2699 2696 and user == old.user()
2700 2697 and date == old.date()
2701 2698 and pureextra == old.extra()):
2702 2699 # nothing changed. continuing here would create a new node
2703 2700 # anyway because of the amend_source noise.
2704 2701 #
2705 2702 # This not what we expect from amend.
2706 2703 return old.node()
2707 2704
2708 2705 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2709 2706 try:
2710 2707 if opts.get('secret'):
2711 2708 commitphase = 'secret'
2712 2709 else:
2713 2710 commitphase = old.phase()
2714 2711 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2715 2712 newid = repo.commitctx(new)
2716 2713 finally:
2717 2714 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2718 2715 if newid != old.node():
2719 2716 # Reroute the working copy parent to the new changeset
2720 2717 repo.setparents(newid, nullid)
2721 2718
2722 2719 # Move bookmarks from old parent to amend commit
2723 2720 bms = repo.nodebookmarks(old.node())
2724 2721 if bms:
2725 2722 marks = repo._bookmarks
2726 2723 for bm in bms:
2727 2724 ui.debug('moving bookmarks %r from %s to %s\n' %
2728 2725 (marks, old.hex(), hex(newid)))
2729 2726 marks[bm] = newid
2730 2727 marks.recordchange(tr)
2731 2728 #commit the whole amend process
2732 2729 if createmarkers:
2733 2730 # mark the new changeset as successor of the rewritten one
2734 2731 new = repo[newid]
2735 2732 obs = [(old, (new,))]
2736 2733 if node:
2737 2734 obs.append((ctx, ()))
2738 2735
2739 2736 obsolete.createmarkers(repo, obs)
2740 2737 if not createmarkers and newid != old.node():
2741 2738 # Strip the intermediate commit (if there was one) and the amended
2742 2739 # commit
2743 2740 if node:
2744 2741 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2745 2742 ui.note(_('stripping amended changeset %s\n') % old)
2746 2743 repair.strip(ui, repo, old.node(), topic='amend-backup')
2747 2744 finally:
2748 2745 lockmod.release(lock, wlock)
2749 2746 return newid
2750 2747
def commiteditor(repo, ctx, subs, editform=''):
    """Return the changeset's existing description, or run the editor.

    If *ctx* already carries a non-empty description it is returned
    unchanged; otherwise the user's editor is launched (with unchanged
    message detection enabled) to obtain one.
    """
    existing = ctx.description()
    if existing:
        return existing
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
2756 2753
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Run the user's editor to obtain a commit message for ``ctx``.

    The initial editor content comes from a '[committemplate]' config
    template matching ``editform`` (most specific form tried first) or,
    failing that, from the plain-text scaffold built by buildcommittext().

    ``finishdesc`` is an optional callable that post-processes the edited
    text.  ``extramsg`` replaces the default "leave empty to abort" hint.
    Raises error.Abort on an empty message, or — when
    ``unchangedmessagedetection`` is set — when the template text was
    returned unmodified.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # look up a [committemplate] entry, starting from the most specific
    # dotted editform (e.g. 'changeset.commit.amend') down to 'changeset'
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        # no template configured at any specificity level
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path)
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    # drop the "HG: ..." helper comment lines before using the message
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
2806 2803
def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    """Render the '[committemplate]' template *tmpl* for *ctx*.

    Every other key from the '[committemplate]' section is made available
    to the templater, so templates can reference each other.  Returns the
    rendered text.  *subs* is accepted for interface symmetry with
    buildcommittext() but is not used here.
    """
    ui = repo.ui
    tmpl, mapfile = gettemplate(ui, tmpl, None)

    t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)

    # expose the remaining [committemplate] items as named sub-templates
    for key, value in ui.configitems('committemplate'):
        if key == 'changeset':
            continue
        t.t.cache[key] = value

    if not extramsg:
        extramsg = ''  # the template always expects a string, never None

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2823 2820
def hgprefix(msg):
    """Prefix every non-empty line of *msg* with "HG: ".

    Empty lines are dropped entirely rather than prefixed.
    """
    prefixed = ("HG: %s" % line for line in msg.split("\n") if line)
    return "\n".join(prefixed)
2826 2823
def buildcommittext(repo, ctx, subs, extramsg):
    """Compose the plain-text commit editor scaffold for *ctx*.

    The result is the existing description (if any) followed by "HG: "
    comment lines summarizing user, branch, bookmark, subrepos and the
    per-file change lists.  *extramsg* is included among the comments.
    """
    lines = []
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        lines.append(ctx.description())
    lines.append("")
    lines.append("")  # Empty line between message and comments.
    lines.append(hgprefix(_("Enter commit message."
                            " Lines beginning with 'HG:' are removed.")))
    lines.append(hgprefix(extramsg))
    lines.append("HG: --")
    lines.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        lines.append(hgprefix(_("branch merge")))
    if ctx.branch():
        lines.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        lines.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    for s in subs:
        lines.append(hgprefix(_("subrepo %s") % s))
    for f in added:
        lines.append(hgprefix(_("added %s") % f))
    for f in modified:
        lines.append(hgprefix(_("changed %s") % f))
    for f in removed:
        lines.append(hgprefix(_("removed %s") % f))
    if not (added or modified or removed):
        lines.append(hgprefix(_("no files changed")))
    lines.append("")

    return "\n".join(lines)
2854 2851
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Print user feedback after a commit.

    Reports "created new head", reopened closed branch heads, and (in
    verbose/debug mode) the committed changeset itself.  ``node`` is the
    freshly committed changeset, ``branch`` the branch it landed on and
    ``bheads`` the branch heads known before the commit.
    """
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if not opts.get('amend') and bheads and node not in bheads:
        headparents = [p for p in parents
                       if p.node() in bheads and p.branch() == branch]
        if not headparents:
            repo.ui.status(_('created new head\n'))
            # The message is not printed for initial roots. For the other
            # changesets, it is printed in the following situations:
            #
            # Par column: for the 2 parents with ...
            #   N: null or no parent
            #   B: parent is on another named branch
            #   C: parent is a regular non head changeset
            #   H: parent was a branch head of the current branch
            # Msg column: whether we print "created new head" message
            # In the following, it is assumed that there already exists some
            # initial branch heads of the current branch, otherwise nothing is
            # printed anyway.
            #
            # Par Msg Comment
            # N N  y  additional topo root
            #
            # B N  y  additional branch root
            # C N  y  additional topo head
            # H N  n  usual case
            #
            # B B  y  weird additional branch root
            # C B  y  branch merge
            # H B  n  merge with named branch
            #
            # C C  y  additional head from merge
            # C H  n  merge with a head
            #
            # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for p in parents:
            if not (p.closesbranch() and p.branch() == branch):
                continue
            repo.ui.status(_('reopening closed branch head %d\n') % p)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2902 2899
def postcommitstatus(repo, pats, opts):
    """Return the working directory status for the files matched by *pats*."""
    wctx = repo[None]
    matcher = scmutil.match(wctx, pats, opts)
    return repo.status(match=matcher)
2905 2902
def revert(ui, repo, ctx, parents, *pats, **opts):
    """Restore files in the working directory to their state in ``ctx``.

    ``parents`` is the (parent, p2) pair of working directory parents.
    ``pats`` select the files to revert; ``opts`` keys used here include
    'interactive', 'no_backup' and 'dry_run' (mirroring the command-line
    flags of 'hg revert').  The heavy lifting of touching the disk is
    delegated to _performrevert(); this function only computes what to do.
    """
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        parent = p2

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    with repo.wlock():
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                # suppress "not found" warnings for paths already known or
                # covered by a subrepo / a known directory prefix
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find status of all file in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinct between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may need backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            mergeadd = set(dsmodified)
            for path in dsmodified:
                if path in mf:
                    mergeadd.remove(path)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # determine the exact nature of the deleted changesets
        deladded = set(_deleted)
        for path in _deleted:
            if path in mf:
                deladded.remove(path)
        deleted = _deleted - deladded

        # distinguish between file to forget and the other
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present at
        # the same path. If a such file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of file>, message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                   }

        # "constant" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set do avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backupinteractive = 3 # do backup if interactively modified
        backup = 2  # unconditionally do backup
        check = 1   # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backupinteractive = backup = check = discard
        if interactive:
            dsmodifiedbackup = backupinteractive
        else:
            dsmodifiedbackup = backup
        tobackup = set()

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets that results that will change file on disk
            # Modified compared to target, no local change
            (modified, actions['revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions['revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions['revert'], dsmodifiedbackup),
            # Added since target
            (added, actions['remove'], discard),
            # Added in working directory
            (dsadded, actions['forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions['drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions['add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions['add'], check),
            # Removed since targe, marked as such in working copy parent
            (dsremoved, actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions['undelete'], check),
            ## the following sets does not result in any file changes
            # File with no modification
            (clean, actions['noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions['unknown'], discard),
            )

        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touch on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup:
                        # If in interactive mode, don't automatically create
                        # .orig files (issue4793)
                        if dobackup == backupinteractive:
                            tobackup.add(abs)
                        elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
                            bakname = scmutil.origpath(ui, repo, rel)
                            ui.note(_('saving current version of %s as %s\n') %
                                    (rel, bakname))
                            if not opts.get('dry_run'):
                                if interactive:
                                    util.copyfile(target, bakname)
                                else:
                                    util.rename(target, bakname)
                    if ui.verbose or not exact:
                        if not isinstance(msg, basestring):
                            msg = msg(abs)
                        ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
            _performrevert(repo, parents, ctx, actions, interactive, tobackup)

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
                except KeyError:
                    raise error.Abort("subrepository '%s' does not exist in %s!"
                                      % (sub, short(ctx.node())))
3187 3184
3188 3185 def _revertprefetch(repo, ctx, *files):
3189 3186 """Let extension changing the storage layer prefetch content"""
3190 3187 pass
3191 3188
def _performrevert(repo, parents, ctx, actions, interactive=False,
                   tobackup=None):
    """function that actually perform all the actions computed for revert

    This is an independent function to let extension to plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.
    """
    parent, p2 = parents
    node = ctx.node()
    # files the user declines to touch in interactive mode; fed to the
    # matcher as exclusions so later steps skip them
    excluded_files = []
    matcher_opts = {"exclude": excluded_files}

    def checkout(f):
        # write ctx's version of f into the working directory
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        # delete f from disk (best effort) and mark it removed in dirstate
        try:
            util.unlinkpath(repo.wjoin(f))
        except OSError:
            pass
        repo.dirstate.remove(f)

    audit_path = pathutil.pathauditor(repo.root)
    for f in actions['forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                repo.dirstate.drop(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                doremove(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            doremove(f)
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, matcher_opts)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        reversehunks = repo.ui.configbool('experimental',
                                          'revertalternateinteractivemode',
                                          True)
        # the diff direction decides whether selected hunks must be
        # reversed before being applied to the working directory
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)
        operation = 'discard' if node == parent else 'revert'

        try:

            chunks, opts = recordfilter(repo.ui, originalchunks,
                                        operation=operation)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        for c in chunks:
            # Create a backup file only if this hunk should be backed up
            if ishunk(c) and c.header.filename() in tobackup:
                abs = c.header.filename()
                target = repo.wjoin(abs)
                bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
                util.copyfile(target, bakname)
                tobackup.remove(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except patch.PatchError as err:
                raise error.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    # re-record copy information for everything we just (re)created
    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3327 3324
def command(table):
    """Returns a function object to be used as a decorator for making commands.

    This function receives a command table as its argument. The table should
    be a dict.

    The returned function can be used as a decorator for adding commands
    to that command table. This function accepts multiple arguments to define
    a command.

    The first argument is the command name.

    The options argument is an iterable of tuples defining command arguments.
    See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.

    The synopsis argument defines a short, one line summary of how to use the
    command. This shows up in the help output.

    The norepo argument defines whether the command does not require a
    local repository. Most commands operate against a repository, thus the
    default is False.

    The optionalrepo argument defines whether the command optionally requires
    a local repository.

    The inferrepo argument defines whether to try to find a repository from the
    command line arguments. If True, arguments will be examined for potential
    repository locations. See ``findrepo()``. If a repository is found, it
    will be used.
    """
    def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
            inferrepo=False):
        def decorator(func):
            # stash repository requirements on the function itself; the
            # dispatcher reads these attributes back later
            func.norepo = norepo
            func.optionalrepo = optionalrepo
            func.inferrepo = inferrepo
            entry = (func, list(options))
            if synopsis:
                entry += (synopsis,)
            table[name] = entry
            return func
        return decorator

    return cmd
3372 3369
# Extension hook points (util.hooks instances); callables registered here
# are invoked by the corresponding commands.

# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
#  (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
#  - (sourceurl, sourcebranch, sourcepeer, incoming)
#  - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()

# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# (state file, clearable, allowcommit, error, hint)
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg update' to abort")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]
3401 3398
def checkunfinished(repo, commit=False):
    '''Abort if a multistep operation (e.g. graft) was left unfinished.

    When ``commit`` is True, states that explicitly allow committing are
    skipped.  It's probably good to check this right before
    bailifchanged().
    '''
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if commit and allowcommit:
            continue
        if repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
3412 3409
def clearunfinished(repo):
    '''Check for unfinished operations (as in checkunfinished) and remove
    the state files of the ones marked clearable.
    '''
    # first pass: refuse to proceed if any non-clearable state exists
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if not clearable and repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
    # second pass: drop every clearable state file that is present
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if clearable and repo.vfs.exists(statefile):
            util.unlink(repo.join(statefile))
3423 3420
# (state file, command that finishes the interrupted operation) pairs,
# consulted by howtocontinue()/checkafterresolved() below
afterresolvedstates = [
    ('graftstate',
     _('hg graft --continue')),
    ]
3428 3425
def howtocontinue(repo):
    '''Return the command needed to finish an interrupted operation.

    Each afterresolvedstates tuple maps a .hg/{file} to the command that
    completes the corresponding operation.

    Returns a (msg, warning) pair: ``msg`` is a "continue: ..." string
    (or None when nothing is pending and the working directory is clean)
    and ``warning`` is a boolean telling whether the message deserves a
    warning-level report.
    '''
    contmsg = _("continue: %s")
    for statefile, cmdhint in afterresolvedstates:
        if repo.vfs.exists(statefile):
            return contmsg % cmdhint, True
    wctx = repo[None]
    dirty = any(repo.status()) or any(wctx.sub(s).dirty()
                                      for s in wctx.substate)
    if not dirty:
        return None, None
    return contmsg % _("hg commit"), False
3449 3446
def checkafterresolved(repo):
    '''Inform the user about the next action after completing hg resolve.

    The message from howtocontinue() is emitted via repo.ui.warn when it
    carries a warning, via repo.ui.note otherwise; nothing is printed
    when there is no message at all.
    '''
    msg, warning = howtocontinue(repo)
    if msg is None:
        return
    write = repo.ui.warn if warning else repo.ui.note
    write("%s\n" % msg)
3464 3461
def wrongtooltocontinue(repo, task):
    '''Abort with "no <task> in progress", hinting at the right command.

    Uses howtocontinue() to find the active task; when it reports one
    with a warning, that message becomes the hint, otherwise no hint is
    attached.
    '''
    msg, warning = howtocontinue(repo)
    hint = msg if warning else None
    raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,1269 +1,1282
1 1 # templater.py - template expansion for output
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import os
11 11 import re
12 12 import types
13 13
14 14 from .i18n import _
15 15 from . import (
16 16 config,
17 17 error,
18 18 minirst,
19 19 parser,
20 20 pycompat,
21 21 registrar,
22 22 revset as revsetmod,
23 23 revsetlang,
24 24 templatefilters,
25 25 templatekw,
26 26 util,
27 27 )
28 28
29 29 # template parsing
30 30
# operator table consumed by parser.parser(); one entry per token type
elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
    ",": (2, None, None, ("list", 2), None),
    "|": (5, None, None, ("|", 5), None),
    "%": (6, None, None, ("%", 6), None),
    ")": (0, None, None, None, None),
    "+": (3, None, None, ("+", 3), None),
    "-": (3, None, ("negate", 10), ("-", 3), None),
    "*": (4, None, None, ("*", 4), None),
    "/": (4, None, None, ("/", 4), None),
    "integer": (0, "integer", None, None, None),
    "symbol": (0, "symbol", None, None, None),
    "string": (0, "string", None, None, None),
    "template": (0, "template", None, None, None),
    "end": (0, None, None, None, None),
}
48 48
def tokenize(program, start, end, term=None):
    """Parse a template expression into a stream of tokens, which must end
    with term if specified"""
    pos = start
    while pos < end:
        c = program[pos]
        if c.isspace(): # skip inter-token whitespace
            pass
        elif c in "(,)%|+-*/": # handle simple operators
            yield (c, None, pos)
        elif c in '"\'': # handle quoted templates
            s = pos + 1
            data, pos = _parsetemplate(program, s, end, c)
            yield ('template', data, s)
            pos -= 1  # compensate for the uniform pos += 1 at loop end
        elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'):
            # handle quoted strings
            c = program[pos + 1]
            s = pos = pos + 2
            while pos < end: # find closing quote
                d = program[pos]
                if d == '\\': # skip over escaped characters
                    pos += 2
                    continue
                if d == c:
                    yield ('string', program[s:pos], s)
                    break
                pos += 1
            else:
                raise error.ParseError(_("unterminated string"), s)
        elif c.isdigit():
            s = pos
            while pos < end:
                d = program[pos]
                if not d.isdigit():
                    break
                pos += 1
            yield ('integer', program[s:pos], s)
            pos -= 1  # compensate for the uniform pos += 1 at loop end
        elif (c == '\\' and program[pos:pos + 2] in (r"\'", r'\"')
              or c == 'r' and program[pos:pos + 3] in (r"r\'", r'r\"')):
            # handle escaped quoted strings for compatibility with 2.9.2-3.4,
            # where some of nested templates were preprocessed as strings and
            # then compiled. therefore, \"...\" was allowed. (issue4733)
            #
            # processing flow of _evalifliteral() at 5ab28a2e9962:
            # outer template string -> stringify() -> compiletemplate()
            # ------------------------ ------------ ------------------
            # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}]
            #             ~~~~~~~~
            #             escaped quoted string
            if c == 'r':
                pos += 1
                token = 'string'
            else:
                token = 'template'
            quote = program[pos:pos + 2]
            s = pos = pos + 2
            while pos < end: # find closing escaped quote
                if program.startswith('\\\\\\', pos, end):
                    pos += 4 # skip over double escaped characters
                    continue
                if program.startswith(quote, pos, end):
                    # interpret as if it were a part of an outer string
                    data = parser.unescapestr(program[s:pos])
                    if token == 'template':
                        data = _parsetemplate(data, 0, len(data))[0]
                    yield (token, data, s)
                    pos += 1
                    break
                pos += 1
            else:
                raise error.ParseError(_("unterminated string"), s)
        elif c.isalnum() or c in '_':
            s = pos
            pos += 1
            while pos < end: # find end of symbol
                d = program[pos]
                if not (d.isalnum() or d == "_"):
                    break
                pos += 1
            sym = program[s:pos]
            yield ('symbol', sym, s)
            pos -= 1  # compensate for the uniform pos += 1 at loop end
        elif c == term:
            yield ('end', None, pos + 1)
            return
        else:
            raise error.ParseError(_("syntax error"), pos)
        pos += 1
    if term:
        raise error.ParseError(_("unterminated template expansion"), start)
    yield ('end', None, pos)
142 142
def _parsetemplate(tmpl, start, stop, quote=''):
    r"""
    >>> _parsetemplate('foo{bar}"baz', 0, 12)
    ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12)
    >>> _parsetemplate('foo{bar}"baz', 0, 12, quote='"')
    ([('string', 'foo'), ('symbol', 'bar')], 9)
    >>> _parsetemplate('foo"{bar}', 0, 9, quote='"')
    ([('string', 'foo')], 4)
    >>> _parsetemplate(r'foo\"bar"baz', 0, 12, quote='"')
    ([('string', 'foo"'), ('string', 'bar')], 9)
    >>> _parsetemplate(r'foo\\"bar', 0, 10, quote='"')
    ([('string', 'foo\\')], 6)
    """
    parsed = []
    sepchars = '{' + quote
    pos = start
    p = parser.parser(elements)
    while pos < stop:
        # look for the next '{' (template opening) or closing quote
        n = min((tmpl.find(c, pos, stop) for c in sepchars),
                key=lambda n: (n < 0, n))
        if n < 0:
            # no separator left: the remainder is one literal string
            parsed.append(('string', parser.unescapestr(tmpl[pos:stop])))
            pos = stop
            break
        c = tmpl[n]
        # count trailing backslashes to decide if the separator is escaped
        bs = (n - pos) - len(tmpl[pos:n].rstrip('\\'))
        if bs % 2 == 1:
            # escaped (e.g. '\{', '\\\{', but not '\\{')
            parsed.append(('string', parser.unescapestr(tmpl[pos:n - 1]) + c))
            pos = n + 1
            continue
        if n > pos:
            parsed.append(('string', parser.unescapestr(tmpl[pos:n])))
        if c == quote:
            return parsed, n + 1

        # '{...}': hand the expression to the expression parser
        parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}'))
        parsed.append(parseres)

    if quote:
        raise error.ParseError(_("unterminated string"), start)
    return parsed, pos
185 185
186 186 def _unnesttemplatelist(tree):
187 187 """Expand list of templates to node tuple
188 188
189 189 >>> def f(tree):
190 190 ... print prettyformat(_unnesttemplatelist(tree))
191 191 >>> f(('template', []))
192 192 ('string', '')
193 193 >>> f(('template', [('string', 'foo')]))
194 194 ('string', 'foo')
195 195 >>> f(('template', [('string', 'foo'), ('symbol', 'rev')]))
196 196 (template
197 197 ('string', 'foo')
198 198 ('symbol', 'rev'))
199 199 >>> f(('template', [('symbol', 'rev')])) # template(rev) -> str
200 200 (template
201 201 ('symbol', 'rev'))
202 202 >>> f(('template', [('template', [('string', 'foo')])]))
203 203 ('string', 'foo')
204 204 """
205 205 if not isinstance(tree, tuple):
206 206 return tree
207 207 op = tree[0]
208 208 if op != 'template':
209 209 return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:])
210 210
211 211 assert len(tree) == 2
212 212 xs = tuple(_unnesttemplatelist(x) for x in tree[1])
213 213 if not xs:
214 214 return ('string', '') # empty template ""
215 215 elif len(xs) == 1 and xs[0][0] == 'string':
216 216 return xs[0] # fast path for string with no template fragment "x"
217 217 else:
218 218 return (op,) + xs
219 219
def parse(tmpl):
    """Parse template string into tree"""
    nodes, stop = _parsetemplate(tmpl, 0, len(tmpl))
    assert stop == len(tmpl), 'unquoted template should be consumed'
    return _unnesttemplatelist(('template', nodes))
225 225
def _parseexpr(expr):
    """Parse a template expression into tree

    >>> _parseexpr('"foo"')
    ('string', 'foo')
    >>> _parseexpr('foo(bar)')
    ('func', ('symbol', 'foo'), ('symbol', 'bar'))
    >>> _parseexpr('foo(')
    Traceback (most recent call last):
      ...
    ParseError: ('not a prefix: end', 4)
    >>> _parseexpr('"foo" "bar"')
    Traceback (most recent call last):
      ...
    ParseError: ('invalid token', 7)
    """
    # a lone expression must consume the whole input string
    tree, pos = parser.parser(elements).parse(tokenize(expr, 0, len(expr)))
    if pos != len(expr):
        raise error.ParseError(_('invalid token'), pos)
    return _unnesttemplatelist(tree)
247 247
def prettyformat(tree):
    # leaf node types rendered inline by parser.prettyformat
    leafnodes = ('integer', 'string', 'symbol')
    return parser.prettyformat(tree, leafnodes)
250 250
def compileexp(exp, context, curmethods):
    """Compile parsed template tree to (func, data) pair"""
    op = exp[0]
    if op not in curmethods:
        raise error.ParseError(_("unknown method '%s'") % op)
    return curmethods[op](exp, context)
257 257
258 258 # template evaluation
259 259
def getsymbol(exp):
    # accept only a bare 'symbol' node and unwrap its name
    if exp[0] != 'symbol':
        raise error.ParseError(_("expected a symbol, got '%s'") % exp[0])
    return exp[1]
264 264
def getlist(x):
    # flatten a left-nested ('list', ...) tree into a Python list
    if not x:
        return []
    if x[0] != 'list':
        return [x]
    return getlist(x[1]) + [x[2]]
271 271
def gettemplate(exp, context):
    """Compile given template tree or load named template from map file;
    returns (func, data) pair"""
    op = exp[0]
    if op in ('template', 'string'):
        return compileexp(exp, context, methods)
    if op == 'symbol':
        # unlike runsymbol(), a 'symbol' here is always a template name,
        # even if it exists in mapping; that lets web map files override
        # keywords, e.g. 'changelogtag' is redefined in map file
        return context._load(exp[1])
    raise error.ParseError(_("expected template specifier"))
283 283
def evalfuncarg(context, mapping, arg):
    # func() may return string, generator of strings or arbitrary object
    # such as date tuple; filters want a concrete value, so collapse
    # generators into strings
    func, data = arg
    value = func(context, mapping, data)
    if isinstance(value, types.GeneratorType):
        value = stringify(value)
    return value
292 292
def evalboolean(context, mapping, arg):
    """Evaluate given argument as boolean, but also takes boolean literals"""
    func, data = arg
    if func is not runsymbol:
        thing = func(context, mapping, data)
    else:
        thing = func(context, mapping, data, default=None)
        if thing is None:
            # not a template keyword: take the symbol as a boolean literal
            thing = util.parsebool(data)
    if isinstance(thing, bool):
        return thing
    # other objects are evaluated as strings, which means 0 is True, but
    # empty dict/list should be False as they are expected to be ''
    return bool(stringify(thing))
308 308
def evalinteger(context, mapping, arg, err):
    # coerce the evaluated argument to int, turning failures into a
    # caller-supplied parse error
    try:
        return int(evalfuncarg(context, mapping, arg))
    except (TypeError, ValueError):
        raise error.ParseError(err)
315 315
def evalstring(context, mapping, arg):
    # flatten whatever the compiled (func, data) pair yields into a string
    return stringify(arg[0](context, mapping, arg[1]))
319 319
def evalstringliteral(context, mapping, arg):
    """Evaluate given argument as string template, but returns symbol name
    if it is unknown"""
    func, data = arg
    if func is runsymbol:
        # unknown symbols fall back to their own name
        return stringify(func(context, mapping, data, default=data))
    return stringify(func(context, mapping, data))
329 329
def runinteger(context, mapping, data):
    # literal integer token; context and mapping are unused
    return int(data)
332 332
def runstring(context, mapping, data):
    # literal string token; returned as-is
    return data
335 335
def _recursivesymbolblocker(key):
    # build a callable that poisons 'key' in a mapping so expanding the
    # same symbol again aborts instead of recursing forever
    def _poisoned(**args):
        raise error.Abort(_("recursive reference '%s' in template") % key)
    return _poisoned
340 340
def _runrecursivesymbol(context, mapping, key):
    # immediate abort used where a recursive reference is already known
    raise error.Abort(_("recursive reference '%s' in template") % key)
343 343
def runsymbol(context, mapping, key, default=''):
    # lookup order: local mapping first, then context-wide defaults
    v = mapping.get(key)
    if v is None:
        v = context._defaults.get(key)
    if v is None:
        # put poison to cut recursion. we can't move this to parsing phase
        # because "x = {x}" is allowed if "x" is a keyword. (issue4758)
        safemapping = mapping.copy()
        safemapping[key] = _recursivesymbolblocker(key)
        try:
            v = context.process(key, safemapping)
        except TemplateNotFound:
            v = default
    if callable(v):
        # keyword functions take the whole mapping as keyword arguments
        return v(**mapping)
    return v
360 360
def buildtemplate(exp, context):
    # compile every child node; runtemplate evaluates them in order
    compiled = [compileexp(e, context, methods) for e in exp[1:]]
    return (runtemplate, compiled)
364 364
def runtemplate(context, mapping, template):
    # lazily evaluate each compiled (func, data) pair in document order
    for compiled in template:
        func, data = compiled
        yield func(context, mapping, data)
368 368
def buildfilter(exp, context):
    # 'x|f': filters take precedence over functions of the same name
    arg = compileexp(exp[1], context, methods)
    name = getsymbol(exp[2])
    if name in context._filters:
        return (runfilter, (arg, context._filters[name]))
    if name in funcs:
        return (funcs[name], [arg])
    raise error.ParseError(_("unknown function '%s'") % name)
379 379
def runfilter(context, mapping, data):
    arg, filt = data
    thing = evalfuncarg(context, mapping, arg)
    try:
        return filt(thing)
    except (ValueError, AttributeError, TypeError):
        # the filter rejected this value; name the keyword that fed it
        if isinstance(arg[1], tuple):
            dt = arg[1][1]
        else:
            dt = arg[1]
        # NOTE: func_name is Python 2 only; this file predates py3 porting
        raise error.Abort(_("template filter '%s' is not compatible with "
                            "keyword '%s'") % (filt.func_name, dt))
392 392
def buildmap(exp, context):
    # '%' operator: the left side yields the data, the right side the
    # template applied to each item
    dfunc, ddata = compileexp(exp[1], context, methods)
    tfunc, tdata = gettemplate(exp[2], context)
    return (runmap, (dfunc, ddata, tfunc, tdata))
397 397
def runmap(context, mapping, data):
    func, data, tfunc, tdata = data
    d = func(context, mapping, data)
    # prefer the object's own mapping iterator when it provides one
    if util.safehasattr(d, 'itermaps'):
        diter = d.itermaps()
    else:
        try:
            diter = iter(d)
        except TypeError:
            if func is runsymbol:
                raise error.ParseError(_("keyword '%s' is not iterable") % data)
            else:
                raise error.ParseError(_("%r is not iterable") % d)

    for i in diter:
        lm = mapping.copy()
        if isinstance(i, dict):
            lm.update(i)
            # keep the outer changeset reachable while the template runs
            lm['originalnode'] = mapping.get('node')
            yield tfunc(context, lm, tdata)
        else:
            # v is not an iterable of dicts, this happen when 'key'
            # has been fully expanded already and format is useless.
            # If so, return the expanded value.
            yield i
423 423
def buildnegate(exp, context):
    # unary minus: compile the operand, negate at run time
    return (runnegate, compileexp(exp[1], context, exprmethods))
427 427
def runnegate(context, mapping, data):
    value = evalinteger(context, mapping, data,
                        _('negation needs an integer argument'))
    return -value
432 432
def buildarithmetic(exp, context, func):
    # binary operator: compile both operands, defer evaluation to
    # runarithmetic
    lhs = compileexp(exp[1], context, exprmethods)
    rhs = compileexp(exp[2], context, exprmethods)
    return (runarithmetic, (func, lhs, rhs))
437 437
def runarithmetic(context, mapping, data):
    func, left, right = data
    err = _('arithmetic only defined on integers')
    lval = evalinteger(context, mapping, left, err)
    rval = evalinteger(context, mapping, right, err)
    try:
        return func(lval, rval)
    except ZeroDivisionError:
        raise error.Abort(_('division by zero is not defined'))
448 448
def buildfunc(exp, context):
    # 'f(args...)': built-in functions win over filters of the same name
    name = getsymbol(exp[1])
    args = [compileexp(x, context, exprmethods) for x in getlist(exp[2])]
    if name in funcs:
        return (funcs[name], args)
    if name in context._filters:
        # a filter used with call syntax: f(x) behaves like x|f
        if len(args) != 1:
            raise error.ParseError(_("filter %s expects one argument") % name)
        return (runfilter, (args[0], context._filters[name]))
    raise error.ParseError(_("unknown function '%s'") % name)
461 461
# dict of template built-in functions
funcs = {}

# decorator registering template functions into 'funcs' (see registrar)
templatefunc = registrar.templatefunc(funcs)
466 466
@templatefunc('date(date[, fmt])')
def date(context, mapping, args):
    """Format a date. See :hg:`help dates` for formatting
    strings. The default is a Unix date format, including the timezone:
    "Mon Sep 04 15:13:13 2006 0700"."""
    if not (1 <= len(args) <= 2):
        # i18n: "date" is a keyword
        raise error.ParseError(_("date expects one or two arguments"))

    date = evalfuncarg(context, mapping, args[0])
    fmt = None
    if len(args) == 2:
        fmt = evalstring(context, mapping, args[1])
    try:
        if fmt is None:
            return util.datestr(date)
        return util.datestr(date, fmt)
    except (TypeError, ValueError):
        # i18n: "date" is a keyword
        raise error.ParseError(_("date expects a date information"))
488 488
@templatefunc('diff([includepattern [, excludepattern]])')
def diff(context, mapping, args):
    """Show a diff, optionally
    specifying files to include or exclude."""
    if len(args) > 2:
        # i18n: "diff" is a keyword
        raise error.ParseError(_("diff expects zero, one, or two arguments"))

    def getpatterns(i):
        # a missing or empty pattern argument means "no restriction"
        if i >= len(args):
            return []
        s = evalstring(context, mapping, args[i]).strip()
        return [s] if s else []

    ctx = mapping['ctx']
    m = ctx.match([], getpatterns(0), getpatterns(1))
    return ''.join(ctx.diff(match=m))
508 508
@templatefunc('files(pattern)')
def files(context, mapping, args):
    """All files of the current changeset matching the pattern. See
    :hg:`help patterns`."""
    if len(args) != 1:
        # i18n: "files" is a keyword
        raise error.ParseError(_("files expects one argument"))

    pat = evalstring(context, mapping, args[0])
    ctx = mapping['ctx']
    matched = list(ctx.matches(ctx.match([pat])))
    return templatekw.showlist("file", matched, **mapping)
522 522
# fix: the advertised signature read 'initialident' — a typo for
# 'initialindent' that shows up verbatim in 'hg help templates'
@templatefunc('fill(text[, width[, initialindent[, hangindent]]])')
def fill(context, mapping, args):
    """Fill many
    paragraphs with optional indentation. See the "fill" filter."""
    if not (1 <= len(args) <= 4):
        # i18n: "fill" is a keyword
        raise error.ParseError(_("fill expects one to four arguments"))

    text = evalstring(context, mapping, args[0])
    width = 76
    initindent = ''
    hangindent = ''
    if 2 <= len(args) <= 4:
        width = evalinteger(context, mapping, args[1],
                            # i18n: "fill" is a keyword
                            _("fill expects an integer width"))
        # the indent arguments are optional; IndexError means "not given"
        try:
            initindent = evalstring(context, mapping, args[2])
            hangindent = evalstring(context, mapping, args[3])
        except IndexError:
            pass

    return templatefilters.fill(text, width, initindent, hangindent)
546 546
@templatefunc('formatnode(node)')
def formatnode(context, mapping, args):
    """Obtain the preferred form of a changeset hash. (DEPRECATED)"""
    if len(args) != 1:
        # i18n: "formatnode" is a keyword
        raise error.ParseError(_("formatnode expects one argument"))

    ui = mapping['ui']
    node = evalstring(context, mapping, args[0])
    # full hash in debug mode, abbreviated otherwise
    return node if ui.debugflag else templatefilters.short(node)
559
@templatefunc('pad(text, width[, fillchar=\' \'[, left=False]])')
def pad(context, mapping, args):
    """Pad text with a
    fill character."""
    if not (2 <= len(args) <= 4):
        # i18n: "pad" is a keyword
        raise error.ParseError(_("pad() expects two to four arguments"))

    width = evalinteger(context, mapping, args[1],
                        # i18n: "pad" is a keyword
                        _("pad() expects an integer width"))

    text = evalstring(context, mapping, args[0])

    fillchar = evalstring(context, mapping, args[2]) if len(args) > 2 else ' '
    left = evalboolean(context, mapping, args[3]) if len(args) > 3 else False

    # left=True pads on the left, i.e. right-justifies the text
    if left:
        return text.rjust(width, fillchar)
    return text.ljust(width, fillchar)
572 585
@templatefunc('indent(text, indentchars[, firstline])')
def indent(context, mapping, args):
    """Indents all non-empty lines
    with the characters given in the indentchars string. An optional
    third parameter will override the indent for the first line only
    if present."""
    if not (2 <= len(args) <= 3):
        # i18n: "indent" is a keyword
        raise error.ParseError(_("indent() expects two or three arguments"))

    text = evalstring(context, mapping, args[0])
    indent = evalstring(context, mapping, args[1])

    if len(args) == 3:
        firstline = evalstring(context, mapping, args[2])
    else:
        firstline = indent

    # templatefilters.indent leaves the first line alone, so prepend it
    return templatefilters.indent(firstline + text, indent)
593 606
@templatefunc('get(dict, key)')
def get(context, mapping, args):
    """Get an attribute/key from an object. Some keywords
    are complex types. This function allows you to obtain the value of an
    attribute on these types."""
    if len(args) != 2:
        # i18n: "get" is a keyword
        raise error.ParseError(_("get() expects two arguments"))

    dictarg = evalfuncarg(context, mapping, args[0])
    if not util.safehasattr(dictarg, 'get'):
        # i18n: "get" is a keyword
        raise error.ParseError(_("get() expects a dict as first argument"))

    return dictarg.get(evalfuncarg(context, mapping, args[1]))
610 623
@templatefunc('if(expr, then[, else])')
def if_(context, mapping, args):
    """Conditionally execute based on the result of
    an expression."""
    if not (2 <= len(args) <= 3):
        # i18n: "if" is a keyword
        raise error.ParseError(_("if expects two or three arguments"))

    if evalboolean(context, mapping, args[0]):
        branch = args[1]
    elif len(args) == 3:
        branch = args[2]
    else:
        return
    yield branch[0](context, mapping, branch[1])
624 637
@templatefunc('ifcontains(needle, haystack, then[, else])')
def ifcontains(context, mapping, args):
    """Conditionally execute based
    on whether the item "needle" is in "haystack"."""
    if not (3 <= len(args) <= 4):
        # i18n: "ifcontains" is a keyword
        raise error.ParseError(_("ifcontains expects three or four arguments"))

    needle = evalstring(context, mapping, args[0])
    haystack = evalfuncarg(context, mapping, args[1])

    if needle in haystack:
        branch = args[2]
    elif len(args) == 4:
        branch = args[3]
    else:
        return
    yield branch[0](context, mapping, branch[1])
640 653
@templatefunc('ifeq(expr1, expr2, then[, else])')
def ifeq(context, mapping, args):
    """Conditionally execute based on
    whether 2 items are equivalent."""
    if not (3 <= len(args) <= 4):
        # i18n: "ifeq" is a keyword
        raise error.ParseError(_("ifeq expects three or four arguments"))

    # both operands are compared in stringified form
    test = evalstring(context, mapping, args[0])
    match = evalstring(context, mapping, args[1])
    if test == match:
        branch = args[2]
    elif len(args) == 4:
        branch = args[3]
    else:
        return
    yield branch[0](context, mapping, branch[1])
655 668
@templatefunc('join(list, sep)')
def join(context, mapping, args):
    """Join items in a list with a delimiter."""
    if not (1 <= len(args) <= 2):
        # i18n: "join" is a keyword
        raise error.ParseError(_("join expects one or two arguments"))

    joinset = args[0][0](context, mapping, args[0][1])
    if util.safehasattr(joinset, 'itermaps'):
        # mappable collections render each member through their joinfmt
        jf = joinset.joinfmt
        joinset = [jf(x) for x in joinset.itermaps()]

    joiner = " "
    if len(args) > 1:
        joiner = evalstring(context, mapping, args[1])

    first = True
    for item in joinset:
        if not first:
            yield joiner
        first = False
        yield item
679 692
@templatefunc('label(label, expr)')
def label(context, mapping, args):
    """Apply a label to generated content. Content with
    a label applied can result in additional post-processing, such as
    automatic colorization."""
    if len(args) != 2:
        # i18n: "label" is a keyword
        raise error.ParseError(_("label expects two arguments"))

    ui = mapping['ui']
    thing = evalstring(context, mapping, args[1])
    # unknown symbols stay literal, so effects like 'red' or 'bold'
    # need no quoting
    effects = evalstringliteral(context, mapping, args[0])

    return ui.label(thing, effects)
696 709
@templatefunc('latesttag([pattern])')
def latesttag(context, mapping, args):
    """The global tags matching the given pattern on the
    most recent globally tagged ancestor of this changeset."""
    if len(args) > 1:
        # i18n: "latesttag" is a keyword
        raise error.ParseError(_("latesttag expects at most one argument"))

    pattern = evalstring(context, mapping, args[0]) if args else None
    return templatekw.showlatesttags(pattern, **mapping)
710 723
@templatefunc('localdate(date[, tz])')
def localdate(context, mapping, args):
    """Converts a date to the specified timezone.
    The default is local date."""
    if not (1 <= len(args) <= 2):
        # i18n: "localdate" is a keyword
        raise error.ParseError(_("localdate expects one or two arguments"))

    date = evalfuncarg(context, mapping, args[0])
    try:
        date = util.parsedate(date)
    except AttributeError: # not str nor date tuple
        # i18n: "localdate" is a keyword
        raise error.ParseError(_("localdate expects a date information"))
    if len(args) >= 2:
        tzoffset = None
        tz = evalfuncarg(context, mapping, args[1])
        if isinstance(tz, str):
            # try a formatted timezone string first (e.g. '+0530')
            tzoffset, remainder = util.parsetimezone(tz)
            if remainder:
                tzoffset = None
        if tzoffset is None:
            # fall back to a raw integer offset
            try:
                tzoffset = int(tz)
            except (TypeError, ValueError):
                # i18n: "localdate" is a keyword
                raise error.ParseError(_("localdate expects a timezone"))
    else:
        # no tz argument: use the local timezone offset
        tzoffset = util.makedate()[1]
    return (date[0], tzoffset)
741 754
@templatefunc('mod(a, b)')
def mod(context, mapping, args):
    """Calculate a mod b such that a / b + a mod b == a"""
    if len(args) != 2:
        # i18n: "mod" is a keyword
        raise error.ParseError(_("mod expects two arguments"))

    # reuse runarithmetic for integer coercion and zero-division handling
    return runarithmetic(context, mapping,
                         (lambda a, b: a % b, args[0], args[1]))
751 764
@templatefunc('relpath(path)')
def relpath(context, mapping, args):
    """Convert a repository-absolute path into a filesystem path relative to
    the current working directory."""
    if len(args) != 1:
        # i18n: "relpath" is a keyword
        raise error.ParseError(_("relpath expects one argument"))

    repo = mapping['ctx'].repo()
    return repo.pathto(evalstring(context, mapping, args[0]))
763 776
@templatefunc('revset(query[, formatargs...])')
def revset(context, mapping, args):
    """Execute a revision set query. See
    :hg:`help revset`."""
    if not len(args) > 0:
        # i18n: "revset" is a keyword
        raise error.ParseError(_("revset expects one or more arguments"))

    raw = evalstring(context, mapping, args[0])
    ctx = mapping['ctx']
    repo = ctx.repo()

    def query(expr):
        m = revsetmod.match(repo.ui, expr)
        return m(repo)

    if len(args) > 1:
        # queries with format arguments are not cached; the arguments are
        # substituted into the query via formatspec
        formatargs = [evalfuncarg(context, mapping, a) for a in args[1:]]
        revs = query(revsetlang.formatspec(raw, *formatargs))
        revs = list(revs)
    else:
        # plain queries are memoized per template run in mapping['cache']
        revsetcache = mapping['cache'].setdefault("revsetcache", {})
        if raw in revsetcache:
            revs = revsetcache[raw]
        else:
            revs = query(raw)
            revs = list(revs)
            revsetcache[raw] = revs

    return templatekw.showrevslist("revision", revs, **mapping)
794 807
@templatefunc('rstdoc(text, style)')
def rstdoc(context, mapping, args):
    """Format reStructuredText."""
    if len(args) != 2:
        # i18n: "rstdoc" is a keyword
        raise error.ParseError(_("rstdoc expects two arguments"))

    text = evalstring(context, mapping, args[0])
    style = evalstring(context, mapping, args[1])
    return minirst.format(text, style=style, keep=['verbose'])
806 819
@templatefunc('separate(sep, args)')
def separate(context, mapping, args):
    """Add a separator between non-empty arguments."""
    if not args:
        # i18n: "separate" is a keyword
        raise error.ParseError(_("separate expects at least one argument"))

    sep = evalstring(context, mapping, args[0])
    first = True
    for arg in args[1:]:
        argstr = evalstring(context, mapping, arg)
        if not argstr:
            # empty arguments contribute nothing, not even a separator
            continue
        if not first:
            yield sep
        first = False
        yield argstr
825 838
@templatefunc('shortest(node, minlength=4)')
def shortest(context, mapping, args):
    """Obtain the shortest representation of
    a node."""
    if not (1 <= len(args) <= 2):
        # i18n: "shortest" is a keyword
        raise error.ParseError(_("shortest() expects one or two arguments"))

    node = evalstring(context, mapping, args[0])

    minlength = 4
    if len(args) > 1:
        minlength = evalinteger(context, mapping, args[1],
                                # i18n: "shortest" is a keyword
                                _("shortest() expects an integer minlength"))

    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.
    cl = mapping['ctx']._repo.unfiltered().changelog
    def isvalid(test):
        # a prefix is valid when it matches exactly one node and cannot
        # be mistaken for a revision number
        try:
            if cl._partialmatch(test) is None:
                return False

            try:
                i = int(test)
                # if we are a pure int, then starting with zero will not be
                # confused as a rev; or, obviously, if the int is larger than
                # the value of the tip rev
                if test[0] == '0' or i > len(cl):
                    return True
                return False
            except ValueError:
                return True
        except error.RevlogError:
            return False

    # shrink the prefix while it stays valid, grow it while ambiguous
    shortest = node
    startlength = max(6, minlength)
    length = startlength
    while True:
        test = node[:length]
        if isvalid(test):
            shortest = test
            if length == minlength or length > startlength:
                return shortest
            length -= 1
        else:
            length += 1
            if len(shortest) <= length:
                return shortest
878 891
@templatefunc('strip(text[, chars])')
def strip(context, mapping, args):
    """Strip characters from a string. By default,
    strips all leading and trailing whitespace."""
    if len(args) not in (1, 2):
        # i18n: "strip" is a keyword
        raise error.ParseError(_("strip expects one or two arguments"))

    text = evalstring(context, mapping, args[0])
    if len(args) < 2:
        return text.strip()
    # optional second argument gives the set of characters to strip
    return text.strip(evalstring(context, mapping, args[1]))
892 905
@templatefunc('sub(pattern, replacement, expression)')
def sub(context, mapping, args):
    """Perform text substitution
    using regular expressions."""
    # NOTE: this is a generator, so argument errors surface on iteration
    if len(args) != 3:
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub expects three arguments"))

    pattern = evalstring(context, mapping, args[0])
    replacement = evalstring(context, mapping, args[1])
    subject = evalstring(context, mapping, args[2])
    try:
        compiled = re.compile(pattern)
    except re.error:
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub got an invalid pattern: %s") % pattern)
    try:
        yield compiled.sub(replacement, subject)
    except re.error:
        # a bad backreference in the replacement only fails at sub() time
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub got an invalid replacement: %s")
                               % replacement)
914 927
@templatefunc('startswith(pattern, text)')
def startswith(context, mapping, args):
    """Returns the value from the "text" argument
    if it begins with the content from the "pattern" argument."""
    if len(args) != 2:
        # i18n: "startswith" is a keyword
        raise error.ParseError(_("startswith expects two arguments"))

    prefix = evalstring(context, mapping, args[0])
    text = evalstring(context, mapping, args[1])
    return text if text.startswith(prefix) else ''
928 941
@templatefunc('word(number, text[, separator])')
def word(context, mapping, args):
    """Return the nth word from a string."""
    if len(args) not in (2, 3):
        # i18n: "word" is a keyword
        raise error.ParseError(_("word expects two or three arguments, got %d")
                               % len(args))

    num = evalinteger(context, mapping, args[0],
                      # i18n: "word" is a keyword
                      _("word expects an integer index"))
    text = evalstring(context, mapping, args[1])
    # default separator (None) means "split on any whitespace run"
    splitter = None
    if len(args) == 3:
        splitter = evalstring(context, mapping, args[2])

    tokens = text.split(splitter)
    # negative indexes count from the end, like normal Python indexing
    if -len(tokens) <= num < len(tokens):
        return tokens[num]
    return ''
951 964
# methods to interpret function arguments or inner expressions (e.g. {_(x)})
exprmethods = {
    "integer": lambda exp, ctx: (runinteger, exp[1]),
    "string": lambda exp, ctx: (runstring, exp[1]),
    "symbol": lambda exp, ctx: (runsymbol, exp[1]),
    "template": buildtemplate,
    "group": lambda exp, ctx: compileexp(exp[1], ctx, exprmethods),
    # ".": buildmember,
    "|": buildfilter,
    "%": buildmap,
    "func": buildfunc,
    "+": lambda exp, ctx: buildarithmetic(exp, ctx, lambda a, b: a + b),
    "-": lambda exp, ctx: buildarithmetic(exp, ctx, lambda a, b: a - b),
    "negate": buildnegate,
    "*": lambda exp, ctx: buildarithmetic(exp, ctx, lambda a, b: a * b),
    "/": lambda exp, ctx: buildarithmetic(exp, ctx, lambda a, b: a // b),
    }

# methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"})
methods = dict(exprmethods)
methods["integer"] = exprmethods["symbol"] # '{1}' as variable
973 986
class _aliasrules(parser.basealiasrules):
    """Parsing and expansion rule set of template aliases"""
    _section = _('template alias')
    _parse = staticmethod(_parseexpr)

    @staticmethod
    def _trygetfunc(tree):
        """Return (name, args) if tree is func(...) or ...|filter; otherwise
        None"""
        op = tree[0]
        if op == 'func' and tree[1][0] == 'symbol':
            # func(args...) form
            return tree[1][1], getlist(tree[2])
        if op == '|' and tree[2][0] == 'symbol':
            # expr|filter form: the filtered value is the sole argument
            return tree[2][1], [tree[1]]
987 1000
def expandaliases(tree, aliases):
    """Return a new tree in which the given aliases are expanded"""
    return _aliasrules.expand(_aliasrules.buildmap(aliases), tree)
992 1005
# template engine

# convenience alias: collapse any (possibly nested) template output into
# a single string
stringify = templatefilters.stringify
996 1009
def _flatten(thing):
    '''yield a single stream from a possibly nested set of iterators'''
    if isinstance(thing, str):
        yield thing
    elif thing is None:
        pass
    elif not util.safehasattr(thing, '__iter__'):
        yield str(thing)
    else:
        # recurse for each element; the recursive call performs exactly
        # the same str/None/non-iterable dispatch as above
        for item in thing:
            for piece in _flatten(item):
                yield piece
1016 1029
def unquotestring(s):
    '''unwrap quotes if any; otherwise returns unmodified string'''
    # only strip when both ends carry the same quote character
    if len(s) >= 2 and s[0] in "'\"" and s[0] == s[-1]:
        return s[1:-1]
    return s
1022 1035
class engine(object):
    '''template expansion engine.

    template expansion works like this. a map file contains key=value
    pairs. if value is quoted, it is treated as string. otherwise, it
    is treated as name of template file.

    templater is asked to expand a key in map. it looks up key, and
    looks for strings like this: {foo}. it expands {foo} by looking up
    foo in map, and substituting it. expansion is recursive: it stops
    when there is no more {foo} to replace.

    expansion also allows formatting and filtering.

    format uses key to expand each item in list. syntax is
    {key%format}.

    filter uses function to transform value. syntax is
    {key|filter1|filter2|...}.'''

    def __init__(self, loader, filters=None, defaults=None, aliases=()):
        self._loader = loader
        self._filters = {} if filters is None else filters
        self._defaults = {} if defaults is None else defaults
        self._aliasmap = _aliasrules.buildmap(aliases)
        # compiled template cache; values are (func, data) pairs
        self._cache = {}

    def _load(self, t):
        '''load, parse, and cache a template'''
        if t in self._cache:
            return self._cache[t]
        # put poison to cut recursion while compiling 't'
        self._cache[t] = (_runrecursivesymbol, t)
        try:
            tree = parse(self._loader(t))
            if self._aliasmap:
                tree = _aliasrules.expand(self._aliasmap, tree)
            self._cache[t] = compileexp(tree, self, methods)
        except: # re-raises
            # drop the poison entry so a later retry starts clean
            del self._cache[t]
            raise
        return self._cache[t]

    def process(self, t, mapping):
        '''Perform expansion. t is name of map element to expand.
        mapping contains added elements for use during expansion. Is a
        generator.'''
        func, data = self._load(t)
        return _flatten(func(self, mapping, data))
1075 1088
# registry of template engine classes, keyed by the type prefix used in
# map files ("type:file"); only the default engine ships in core
engines = {'default': engine}
1077 1090
def stylelist():
    """Return a comma-separated, sorted list of available styles.

    Styles are discovered by scanning the first template directory for
    files named ``map-cmdline.<style>``. Editor/patch leftovers
    (``.orig``/``.rej``) are skipped.
    """
    paths = templatepaths()
    if not paths:
        return _('no templates found, try `hg debuginstall` for more info')
    styles = []
    # fix: the original used 'file' (shadows the builtin) and a local
    # named 'stylelist' (shadows this function itself)
    for filename in os.listdir(paths[0]):
        parts = filename.split(".")
        if parts[-1] in ('orig', 'rej'):
            continue
        # guard len(parts) > 1 so a bare "map-cmdline" file (no style
        # suffix) cannot raise IndexError
        if parts[0] == "map-cmdline" and len(parts) > 1:
            styles.append(parts[1])
    return ", ".join(sorted(styles))
1091 1104
def _readmapfile(mapfile):
    """Load template elements from the given map file"""
    if not os.path.exists(mapfile):
        raise error.Abort(_("style '%s' not found") % mapfile,
                          hint=_("available styles: %s") % stylelist())

    base = os.path.dirname(mapfile)
    conf = config.config(includepaths=templatepaths())
    conf.read(mapfile)

    cache = {}
    tmap = {}
    for key, val in conf[''].items():
        if not val:
            raise error.ParseError(_('missing value'), conf.source('', key))
        if val[0] in "'\"":
            # quoted value: an inline template string
            if val[0] != val[-1]:
                raise error.ParseError(_('unmatched quotes'),
                                       conf.source('', key))
            cache[key] = unquotestring(val)
        elif key == "__base__":
            # treat as a pointer to a base class for this style
            path = util.normpath(os.path.join(base, val))

            # fallback check in template paths
            if not os.path.exists(path):
                for p in templatepaths():
                    p2 = util.normpath(os.path.join(p, val))
                    if os.path.isfile(p2):
                        path = p2
                        break
                    p3 = util.normpath(os.path.join(p2, "map"))
                    if os.path.isfile(p3):
                        path = p3
                        break

            # inherit base entries, but local definitions win
            bcache, btmap = _readmapfile(path)
            for k, v in bcache.items():
                cache.setdefault(k, v)
            for k, v in btmap.items():
                tmap.setdefault(k, v)
        else:
            # plain value: "[enginetype:]relative/path" to a template file
            if ':' in val:
                ttype, fname = val.split(':', 1)
            else:
                ttype, fname = 'default', val
            tmap[key] = ttype, os.path.join(base, fname)
    return cache, tmap
1141 1154
class TemplateNotFound(error.Abort):
    """Raised when a requested template name is absent from the map"""
1144 1157
class templater(object):

    def __init__(self, filters=None, defaults=None, cache=None, aliases=(),
                 minchunk=1024, maxchunk=65536):
        '''set up template engine.
        filters is dict of functions. each transforms a value into another.
        defaults is dict of default map definitions.
        aliases is list of alias (name, replacement) pairs.
        '''
        # copy so callers' cache dicts are never mutated through us
        self.cache = {} if cache is None else cache.copy()
        self.map = {}
        self.filters = templatefilters.filters.copy()
        self.filters.update({} if filters is None else filters)
        self.defaults = {} if defaults is None else defaults
        self._aliases = aliases
        self.minchunk, self.maxchunk = minchunk, maxchunk
        # engine instances, lazily created per engine type
        self.ecache = {}

    @classmethod
    def frommapfile(cls, mapfile, filters=None, defaults=None, cache=None,
                    minchunk=1024, maxchunk=65536):
        """Create templater from the specified map file"""
        t = cls(filters, defaults, cache, [], minchunk, maxchunk)
        mapcache, tmap = _readmapfile(mapfile)
        t.cache.update(mapcache)
        t.map = tmap
        return t

    def __contains__(self, key):
        return key in self.cache or key in self.map

    def load(self, t):
        '''Get the template for the given template name. Use a local cache.'''
        if t in self.cache:
            return self.cache[t]
        try:
            self.cache[t] = util.readfile(self.map[t][1])
        except KeyError as inst:
            raise TemplateNotFound(_('"%s" not in template map') %
                                   inst.args[0])
        except IOError as inst:
            raise IOError(inst.args[0], _('template file %s: %s') %
                          (self.map[t][1], inst.args[1]))
        return self.cache[t]

    def __call__(self, t, **mapping):
        # resolve the engine type; an empty or missing type means 'default'
        ttype = self.map.get(t, ('default',))[0] or 'default'
        if ttype not in self.ecache:
            try:
                ecls = engines[ttype]
            except KeyError:
                raise error.Abort(_('invalid template engine: %s') % ttype)
            self.ecache[ttype] = ecls(self.load, self.filters, self.defaults,
                                      self._aliases)
        proc = self.ecache[ttype]

        stream = proc.process(t, mapping)
        if self.minchunk:
            # batch tiny yields into progressively larger chunks for I/O
            stream = util.increasingchunks(stream, min=self.minchunk,
                                           max=self.maxchunk)
        return stream
1211 1224
def templatepaths():
    '''return locations used for template files.'''
    candidates = [os.path.normpath(os.path.join(util.datapath, subdir))
                  for subdir in ['templates']]
    return [p for p in candidates if os.path.isdir(p)]
1218 1231
def templatepath(name):
    '''return location of template file. returns None if not found.'''
    for directory in templatepaths():
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    return None
1226 1239
def stylemap(styles, paths=None):
    """Return path to mapfile for a given style.

    Searches mapfile in the following locations:
    1. templatepath/style/map
    2. templatepath/map-style
    3. templatepath/map
    """

    if paths is None:
        paths = templatepaths()
    elif isinstance(paths, str):
        paths = [paths]

    if isinstance(styles, str):
        styles = [styles]

    for style in styles:
        # only plain name is allowed to honor template paths
        if (not style or style in (os.curdir, os.pardir)
                or pycompat.ossep in style
                or (pycompat.osaltsep and pycompat.osaltsep in style)):
            continue
        # candidate locations, most specific first
        locations = [os.path.join(style, 'map'), 'map-' + style, 'map']

        for path in paths:
            for location in locations:
                mapfile = os.path.join(path, location)
                if os.path.isfile(mapfile):
                    return style, mapfile

    raise RuntimeError("No hgweb templates found in %r" % paths)
1261 1274
def loadfunction(ui, extname, registrarobj):
    """Load template function from specified registrarobj
    """
    # merge every registered function into the module-level table
    funcs.update(registrarobj._table)

# tell hggettext to extract docstrings from these functions:
i18nfunctions = funcs.values()
General Comments 0
You need to be logged in to leave comments. Login now