##// END OF EJS Templates
formatter: make labels work with templated output
Changeset r28384:3356bf61 (branch: default) by Kostia Balytskyi
parent/child changeset navigation, "Browse files", and "Show More" are repository-viewer controls
@@ -1,3472 +1,3473 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import cStringIO
11 11 import errno
12 12 import os
13 13 import re
14 14 import sys
15 15 import tempfile
16 16
17 17 from .i18n import _
18 18 from .node import (
19 19 bin,
20 20 hex,
21 21 nullid,
22 22 nullrev,
23 23 short,
24 24 )
25 25
26 26 from . import (
27 27 bookmarks,
28 28 changelog,
29 29 copies,
30 30 crecord as crecordmod,
31 31 encoding,
32 32 error,
33 33 formatter,
34 34 graphmod,
35 35 lock as lockmod,
36 36 match as matchmod,
37 37 obsolete,
38 38 patch,
39 39 pathutil,
40 40 phases,
41 41 repair,
42 42 revlog,
43 43 revset,
44 44 scmutil,
45 45 templatekw,
46 46 templater,
47 47 util,
48 48 )
49 49
def ishunk(x):
    """Return True if *x* is a record/crecord hunk object."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
53 53
def newandmodified(chunks, originalchunks):
    """Return the set of filenames whose hunks in *chunks* introduce a new
    file and were not already present in *originalchunks*."""
    return set(c.header.filename()
               for c in chunks
               if ishunk(c) and c.header.isnewfile()
               and c not in originalchunks)
61 61
def parsealiases(cmd):
    """Split a command-table key such as "^log|history" into its alias list.

    The leading "^" (the "show in short help" marker) is discarded.
    """
    names = cmd.lstrip("^")
    return names.split("|")
64 64
def setupwrapcolorwrite(ui):
    """Wrap ui.write so that diff output can be labeled/colorized.

    Returns the original write method so the caller can restore it later.
    """
    origwrite = ui.write

    def labeledwrite(write, *args, **kw):
        baselabel = kw.pop('label', '')
        for chunk, sublabel in patch.difflabel(lambda: args):
            write(chunk, label=baselabel + sublabel)

    def wrapper(*args, **kwargs):
        return labeledwrite(origwrite, *args, **kwargs)

    setattr(ui, 'write', wrapper)
    return origwrite
77 77
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Let the user filter *originalhunks*, using curses when enabled.

    With *testfile* set, the curses chunk selector is driven by the
    scripted test decorator instead of an interactive session.
    """
    if not usecurses:
        return patch.filterpatch(ui, originalhunks, operation)

    if testfile:
        chooser = crecordmod.testdecorator(testfile,
                                           crecordmod.testchunkselector)
    else:
        chooser = crecordmod.chunkselector

    return crecordmod.filterpatch(ui, originalhunks, chooser, operation)
90 90
def recordfilter(ui, originalhunks, operation=None):
    """Prompt the user to filter *originalhunks*; return (chunks, opts).

    *operation* is a translated string shown to the user to indicate what
    kind of filtering is happening (reverting, committing, shelving, ...).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config('experimental', 'crecordtest', None)
    # temporarily wrap ui.write so diff output is colorized during the prompt
    oldwrite = setupwrapcolorwrite(ui)
    try:
        return filterchunks(ui, originalhunks, usecurses, testfile,
                            operation)
    finally:
        ui.write = oldwrite
107 107
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
            filterfn, *pats, **opts):
    """Interactively select changes and delegate the commit to *commitfunc*.

    *filterfn* is called to let the user pick hunks; *cmdsuggest* names the
    non-interactive command to suggest when the terminal is not interactive;
    *backupall* forces backing up every changed file, not just the ones the
    user selected.
    """
    from . import merge as mergemod
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        # partial commits of a merge are not supported
        merge = len(repo[None].parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        status = repo.status(match=match)
        # git-style diffs are required so new/renamed files get full hunks
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, so we have intending-to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        # every file any selected hunk touches (headers have no .files())
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                    newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                # an existing backup dir from a previous run is fine
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            # assemble the selected hunks into one patch to apply later
            fp = cStringIO.StringIO()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # new files must be removed so the revert below starts clean
            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                        False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best-effort cleanup; never mask the original outcome
                pass

    def recordinwlock(ui, repo, message, match, opts):
        # take the working-directory lock around the whole record operation
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
265 265
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for entry in keys:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)
        matched = None
        if cmd in aliases:
            matched = cmd
        elif not strict:
            # accept any unambiguous prefix; first alias wins
            prefixed = [a for a in aliases if a.startswith(cmd)]
            if prefixed:
                matched = prefixed[0]
        if matched is None:
            continue
        if aliases[0].startswith("debug") or matched.startswith("debug"):
            debugchoice[matched] = (aliases, table[entry])
        else:
            choice[matched] = (aliases, table[entry])

    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
303 303
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    # exact match beats any prefix ambiguity
    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))

    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
320 320
def findrepo(p):
    """Walk upward from *p* looking for a directory containing ".hg".

    Returns the repository root, or None when the filesystem root is
    reached without finding one.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # dirname() is a fixed point: we hit the filesystem root
            return None
        p = parent
    return p
328 328
def bailifchanged(repo, merge=True):
    """Abort when the working directory has uncommitted changes.

    With *merge* True, an in-progress (uncommitted) merge also aborts.
    Subrepositories are checked recursively.
    """
    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'))
    st = repo.status()
    # modified, added, removed or deleted files all count as changes
    if any(st[:4]):
        raise error.Abort(_('uncommitted changes'))
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        wctx.sub(subpath).bailifchanged()
338 338
def logmessage(ui, opts):
    """Get the commit message from the -m/--message and -l/--logfile options.

    Returns None when neither option supplies a message.
    """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                          'exclusive'))
    if logfile and not message:
        try:
            if logfile == '-':
                # read the message from stdin
                message = ui.fin.read()
            else:
                # normalize line endings while reading the file
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, inst.strerror))
    return message
357 357
def mergeeditform(ctxorbool, baseformname):
    """Return the editform name for *baseformname* (see committemplate).

    'ctxorbool' is either a ctx to be committed, or a bool indicating
    whether a merge is being committed.  The result is baseformname with
    '.merge' appended for a merge and '.normal' appended otherwise.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        ismerge = len(ctxorbool.parents()) > 1
    return baseformname + (".merge" if ismerge else ".normal")
374 374
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """Return the commit message editor appropriate for '--edit'.

    'finishdesc' is called with the edited message (the new changeset's
    description) after editing but before the empty-ness check; its return
    value is what gets stored in history, which allows rewriting the
    description before storing.

    'extramsg' replaces the 'Leave message empty to abort commit' line in
    the editor; the 'HG: ' prefix and EOL are added automatically.

    'editform' is a dot-separated list of names distinguishing the purpose
    of the commit text editing.

    'commitforceeditor' is returned regardless of 'edit' whenever
    'finishdesc' or 'extramsg' is given, because those are specific to
    usage in MQ.
    """
    if edit or finishdesc or extramsg:
        def forcededitor(r, c, s):
            return commitforceeditor(r, c, s,
                                     finishdesc=finishdesc,
                                     extramsg=extramsg,
                                     editform=editform)
        return forcededitor
    if editform:
        def formeditor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return formeditor
    return commiteditor
405 405
def loglimit(opts):
    """Get the log limit according to option -l/--limit.

    Returns the limit as a positive int, or None when no limit was given.
    """
    limit = opts.get('limit')
    if not limit:
        return None
    try:
        limit = int(limit)
    except ValueError:
        raise error.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise error.Abort(_('limit must be positive'))
    return limit
419 419
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand the %-escapes in output-filename pattern *pat*.

    Escapes are enabled only when the corresponding argument was supplied:
    node-based ones (%H, %R, %h, %m, %r) need *node*; %N needs *total*;
    %n needs *seqno* (zero-padded to the width of *total* when both are
    given); %s/%d/%p need *pathname*.  %% always expands to a literal '%'
    and %b to the basename of the repository root.
    """
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }
    if node:
        expander.update({
            'H': lambda: hex(node),
            'R': lambda: str(repo.changelog.rev(node)),
            'h': lambda: short(node),
            'm': lambda: re.sub('[^\w]', '_', str(desc)),
            'r': lambda: str(repo.changelog.rev(node)).zfill(revwidth or 0),
        })
    if total is not None:
        expander['N'] = lambda: str(total)
    if seqno is not None:
        expander['n'] = lambda: str(seqno)
    if total is not None and seqno is not None:
        # both known: pad the sequence number to the total's width
        expander['n'] = lambda: str(seqno).zfill(len(str(total)))
    if pathname is not None:
        expander['s'] = lambda: os.path.basename(pathname)
        expander['d'] = lambda: os.path.dirname(pathname) or '.'
        expander['p'] = lambda: pathname

    try:
        pieces = []
        i, end = 0, len(pat)
        while i < end:
            ch = pat[i]
            if ch == '%':
                i += 1
                ch = expander[pat[i]]()
            pieces.append(ch)
            i += 1
        return ''.join(pieces)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
465 465
466 466 class _unclosablefile(object):
467 467 def __init__(self, fp):
468 468 self._fp = fp
469 469
470 470 def close(self):
471 471 pass
472 472
473 473 def __iter__(self):
474 474 return iter(self._fp)
475 475
476 476 def __getattr__(self, attr):
477 477 return getattr(self._fp, attr)
478 478
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Return a file-like object for the output pattern *pat*.

    An empty pattern or '-' maps to stdio (wrapped so it cannot be
    closed); an object with the right read/write capability is returned
    as-is; otherwise *pat* is expanded with makefilename and opened.
    *modemap*, when given, overrides and records per-file open modes so a
    file written twice is appended to the second time.
    """
    writable = mode not in ('r', 'rb')

    if not pat or pat == '-':
        stream = repo.ui.fout if writable else repo.ui.fin
        return _unclosablefile(stream)
    # a file-like object passed directly through
    if util.safehasattr(pat, 'write') and writable:
        return pat
    if util.safehasattr(pat, 'read') and 'r' in mode:
        return pat
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # subsequent writes to the same file append instead
            modemap[fn] = 'ab'
    return open(fn, mode)
501 501
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    # validate mutually-exclusive option combinations before opening anything
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            # unfiltered so hidden revisions are accessible too
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                   "treemanifest enabled"))
            dirlog = repo.dirlog(file_)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifest
        elif file_:
            filelog = repo.file(file_)
            # an empty filelog means the file is not tracked; fall through
            if len(filelog):
                r = filelog
    if not r:
        # last resort: open the named revlog's ".i" file directly from disk
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
546 546
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with *rename* True, move) files matching *pats*.

    The last entry of *pats* is the destination.  Returns True when any
    individual copy failed (so the caller can exit non-zero).
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # Return [(abs, rel, exact)] sources matching *pat*, warning about
        # and skipping unmanaged / removed files that were named exactly.
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(repo[None], [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # Copy one file; returns True on failure (for the error count).
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                # case-only rename on a case-insensitive filesystem
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            # --after only records a copy that already happened on disk
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # rename via a temporary so only the case changes
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                util.unlinkpath(repo.wjoin(abssrc))
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        # Map each source path to its target path (non --after case).
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        # Map sources to targets for --after: the files already moved on
        # disk, so guess how much of the source prefix to strip by scoring
        # which interpretation matches more existing target files.
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
774 774
def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
            runargs=None, appendpid=False):
    '''Run a command as a service.

    With --daemon, re-execs itself detached (passing
    --daemon-postexec so the child knows to finish daemonizing); the child
    signals successful startup by unlinking a lock file the parent watches.
    *initfn* runs before, and *runfn* after, daemonization; *parentfn*
    (if given) is invoked in the parent with the child's pid.
    '''

    def writepid(pid):
        # record the service pid when --pid-file was requested
        if opts['pid_file']:
            if appendpid:
                mode = 'a'
            else:
                mode = 'w'
            fp = open(opts['pid_file'], mode)
            fp.write(str(pid) + '\n')
            fp.close()

    if opts['daemon'] and not opts['daemon_postexec']:
        # parent side: spawn the detached child and wait for it to start
        # Signal child process startup with file removal
        lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
        os.close(lockfd)
        try:
            if not runargs:
                runargs = util.hgcmd() + sys.argv[1:]
            runargs.append('--daemon-postexec=unlink:%s' % lockpath)
            # Don't pass --cwd to the child process, because we've already
            # changed directory.
            for i in xrange(1, len(runargs)):
                if runargs[i].startswith('--cwd='):
                    del runargs[i]
                    break
                elif runargs[i].startswith('--cwd'):
                    del runargs[i:i + 2]
                    break
            def condfn():
                return not os.path.exists(lockpath)
            pid = util.rundetached(runargs, condfn)
            if pid < 0:
                raise error.Abort(_('child process failed to start'))
            writepid(pid)
        finally:
            try:
                os.unlink(lockpath)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
        if parentfn:
            return parentfn(pid)
        else:
            return

    if initfn:
        initfn()

    if not opts['daemon']:
        writepid(util.getpid())

    if opts['daemon_postexec']:
        # child side: finish daemonizing before running the service
        inst = opts['daemon_postexec']
        try:
            os.setsid()
        except AttributeError:
            # platforms without setsid (e.g. Windows) just skip it
            pass
        if inst.startswith('unlink:'):
            lockpath = inst[7:]
            os.unlink(lockpath)
        elif inst != 'none':
            raise error.Abort(_('invalid value for --daemon-postexec'))
        util.hidewindow()
        sys.stdout.flush()
        sys.stderr.flush()

        # detach stdio: stdin from /dev/null, stdout/stderr to the log
        # file (or /dev/null when no logfile was given)
        nullfd = os.open(os.devnull, os.O_RDWR)
        logfilefd = nullfd
        if logfile:
            logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
        os.dup2(nullfd, 0)
        os.dup2(logfilefd, 1)
        os.dup2(logfilefd, 2)
        if nullfd not in (0, 1, 2):
            os.close(nullfd)
        if logfile and logfilefd not in (0, 1, 2):
            os.close(logfilefd)

    if runfn:
        return runfn()
858 858
## facility to let extensions process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to the actual import function
#
# 'preimport' functions are run before the commit is made and are provided
# the following arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass an ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' functions are run after the commit is made and are provided
# the following argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
879 879
880 880 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
881 881 """Utility function used by commands.import to import a single patch
882 882
883 883 This function is explicitly defined here to help the evolve extension to
884 884 wrap this part of the import logic.
885 885
886 886 The API is currently a bit ugly because it a simple code translation from
887 887 the import command. Feel free to make it better.
888 888
889 889 :hunk: a patch (as a binary string)
890 890 :parents: nodes that will be parent of the created commit
891 891 :opts: the full dict of option passed to the import command
892 892 :msgs: list to save commit message to.
893 893 (used in case we need to save it when failing)
894 894 :updatefunc: a function that update a repo to a given node
895 895 updatefunc(<repo>, <node>)
896 896 """
897 897 # avoid cycle context -> subrepo -> cmdutil
898 898 from . import context
899 899 extractdata = patch.extract(ui, hunk)
900 900 tmpname = extractdata.get('filename')
901 901 message = extractdata.get('message')
902 902 user = opts.get('user') or extractdata.get('user')
903 903 date = opts.get('date') or extractdata.get('date')
904 904 branch = extractdata.get('branch')
905 905 nodeid = extractdata.get('nodeid')
906 906 p1 = extractdata.get('p1')
907 907 p2 = extractdata.get('p2')
908 908
909 909 nocommit = opts.get('no_commit')
910 910 importbranch = opts.get('import_branch')
911 911 update = not opts.get('bypass')
912 912 strip = opts["strip"]
913 913 prefix = opts["prefix"]
914 914 sim = float(opts.get('similarity') or 0)
915 915 if not tmpname:
916 916 return (None, None, False)
917 917
918 918 rejects = False
919 919
920 920 try:
921 921 cmdline_message = logmessage(ui, opts)
922 922 if cmdline_message:
923 923 # pickup the cmdline msg
924 924 message = cmdline_message
925 925 elif message:
926 926 # pickup the patch msg
927 927 message = message.strip()
928 928 else:
929 929 # launch the editor
930 930 message = None
931 931 ui.debug('message:\n%s\n' % message)
932 932
933 933 if len(parents) == 1:
934 934 parents.append(repo[nullid])
935 935 if opts.get('exact'):
936 936 if not nodeid or not p1:
937 937 raise error.Abort(_('not a Mercurial patch'))
938 938 p1 = repo[p1]
939 939 p2 = repo[p2 or nullid]
940 940 elif p2:
941 941 try:
942 942 p1 = repo[p1]
943 943 p2 = repo[p2]
944 944 # Without any options, consider p2 only if the
945 945 # patch is being applied on top of the recorded
946 946 # first parent.
947 947 if p1 != parents[0]:
948 948 p1 = parents[0]
949 949 p2 = repo[nullid]
950 950 except error.RepoError:
951 951 p1, p2 = parents
952 952 if p2.node() == nullid:
953 953 ui.warn(_("warning: import the patch as a normal revision\n"
954 954 "(use --exact to import the patch as a merge)\n"))
955 955 else:
956 956 p1, p2 = parents
957 957
958 958 n = None
959 959 if update:
960 960 if p1 != parents[0]:
961 961 updatefunc(repo, p1.node())
962 962 if p2 != parents[1]:
963 963 repo.setparents(p1.node(), p2.node())
964 964
965 965 if opts.get('exact') or importbranch:
966 966 repo.dirstate.setbranch(branch or 'default')
967 967
968 968 partial = opts.get('partial', False)
969 969 files = set()
970 970 try:
971 971 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
972 972 files=files, eolmode=None, similarity=sim / 100.0)
973 973 except patch.PatchError as e:
974 974 if not partial:
975 975 raise error.Abort(str(e))
976 976 if partial:
977 977 rejects = True
978 978
979 979 files = list(files)
980 980 if nocommit:
981 981 if message:
982 982 msgs.append(message)
983 983 else:
984 984 if opts.get('exact') or p2:
985 985 # If you got here, you either use --force and know what
986 986 # you are doing or used --exact or a merge patch while
987 987 # being updated to its first parent.
988 988 m = None
989 989 else:
990 990 m = scmutil.matchfiles(repo, files or [])
991 991 editform = mergeeditform(repo[None], 'import.normal')
992 992 if opts.get('exact'):
993 993 editor = None
994 994 else:
995 995 editor = getcommiteditor(editform=editform, **opts)
996 996 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
997 997 extra = {}
998 998 for idfunc in extrapreimport:
999 999 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1000 1000 try:
1001 1001 if partial:
1002 1002 repo.ui.setconfig('ui', 'allowemptycommit', True)
1003 1003 n = repo.commit(message, user,
1004 1004 date, match=m,
1005 1005 editor=editor, extra=extra)
1006 1006 for idfunc in extrapostimport:
1007 1007 extrapostimportmap[idfunc](repo[n])
1008 1008 finally:
1009 1009 repo.ui.restoreconfig(allowemptyback)
1010 1010 else:
1011 1011 if opts.get('exact') or importbranch:
1012 1012 branch = branch or 'default'
1013 1013 else:
1014 1014 branch = p1.branch()
1015 1015 store = patch.filestore()
1016 1016 try:
1017 1017 files = set()
1018 1018 try:
1019 1019 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1020 1020 files, eolmode=None)
1021 1021 except patch.PatchError as e:
1022 1022 raise error.Abort(str(e))
1023 1023 if opts.get('exact'):
1024 1024 editor = None
1025 1025 else:
1026 1026 editor = getcommiteditor(editform='import.bypass')
1027 1027 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1028 1028 message,
1029 1029 user,
1030 1030 date,
1031 1031 branch, files, store,
1032 1032 editor=editor)
1033 1033 n = memctx.commit()
1034 1034 finally:
1035 1035 store.close()
1036 1036 if opts.get('exact') and nocommit:
1037 1037 # --exact with --no-commit is still useful in that it does merge
1038 1038 # and branch bits
1039 1039 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1040 1040 elif opts.get('exact') and hex(n) != nodeid:
1041 1041 raise error.Abort(_('patch is damaged or loses information'))
1042 1042 msg = _('applied to working directory')
1043 1043 if n:
1044 1044 # i18n: refers to a short changeset id
1045 1045 msg = _('created %s') % short(n)
1046 1046 return (msg, n, rejects)
1047 1047 finally:
1048 1048 os.unlink(tmpname)
1049 1049
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# function has to return a string to be added to the header or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1057 1057
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches.

    One "# HG changeset patch" header plus description and diff is
    emitted per revision, either into the given file object ``fp`` or,
    when ``fp`` is None and ``template`` is non-empty, into a file whose
    name is expanded from ``template`` via makefileobj().

    :repo: repository to read changesets from
    :revs: non-empty sequence of revisions to export
    :template: filename template used when fp is None
    :fp: optional file-like object receiving all output
    :switch_parent: diff against the second parent when one exists
    :opts: diff options forwarded to patch.diffui
    :match: matcher limiting which files are diffed
    '''

    total = len(revs)
    # NOTE: revs must be non-empty here; max() raises on an empty sequence
    revwidth = max([len(str(rev)) for rev in revs])
    filemode = {}

    def single(rev, seqno, fp):
        # emit one changeset; seqno is the 1-based position within revs
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()

        # diff base: first (possibly swapped) parent, or null for a root
        if parents:
            prev = parents[0]
        else:
            prev = nullid

        shouldclose = False
        if not fp and len(template) > 0:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fp = makefileobj(repo, template, node, desc=desc, total=total,
                             seqno=seqno, revwidth=revwidth, mode='wb',
                             modemap=filemode)
            # only close files we opened ourselves
            shouldclose = True
        if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
            repo.ui.note("%s\n" % fp.name)

        if not fp:
            write = repo.ui.write
        else:
            def write(s, **kw):
                # plain file objects do not understand labels; drop them
                fp.write(s)

        write("# HG changeset patch\n")
        write("# User %s\n" % ctx.user())
        write("# Date %d %d\n" % ctx.date())
        write("# %s\n" % util.datestr(ctx.date()))
        if branch and branch != 'default':
            write("# Branch %s\n" % branch)
        write("# Node ID %s\n" % hex(node))
        write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            write("# Parent %s\n" % hex(parents[1]))

        # let extensions add extra header lines
        for headerid in extraexport:
            header = extraexportmap[headerid](seqno, ctx)
            if header is not None:
                write('# %s\n' % header)
        write(ctx.description().rstrip())
        write("\n\n")

        for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
            write(chunk, label=label)

        if shouldclose:
            fp.close()

    for seqno, rev in enumerate(revs):
        single(rev, seqno + 1, fp)
1122 1122
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False):
    '''show diff or diffstat.

    Writes a diffstat summary (stat=True) or a full diff (stat=False)
    between node1 and node2 to the ui, or to fp when given. ``prefix``
    is prepended to file names, ``root`` restricts the diff to a
    subdirectory, and subrepositories are included when ``listsubrepos``
    is true.
    '''
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            # plain file objects do not understand labels; drop them
            fp.write(s)

    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        # warn about match patterns falling outside the relative root
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        # diffstat needs no context lines
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix, relroot=relroot)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width,
                                             git=diffopts.git):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix,
                                         relroot=relroot):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.subdirmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1180 1180
class changeset_printer(object):
    '''show changeset information when templating not requested.

    This is the classic "hg log" renderer; output detail depends on the
    ui verbosity level (quiet/normal/verbose/debug).
    '''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        # when buffered, rendered output is collected per rev in
        # self.hunk/self.header and emitted later by flush()
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        self.header = {}  # rev -> rendered header text
        self.hunk = {}  # rev -> rendered changeset text
        self.lastheader = None
        self.footer = None

    def flush(self, ctx):
        """Write buffered output for ctx; return 1 if any, else 0."""
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                # only emit a header when it differs from the previous one
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        # public entry point; buffers output per rev when requested
        if self.buffered:
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()
        if self.ui.debugflag:
            hexfunc = hex
        else:
            hexfunc = short
        # as of now, wctx.node() and wctx.rev() return None, but we want to
        # show the same values as {node} and {rev} templatekw
        revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % revnode, label='log.node')
            return

        date = util.datestr(ctx.date())

        # i18n: column positioning for "hg log"
        self.ui.write(_("changeset: %d:%s\n") % revnode,
                      label='log.changeset changeset.%s' % ctx.phasestr())

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            # i18n: column positioning for "hg log"
            self.ui.write(_("branch: %s\n") % branch,
                          label='log.branch')

        for name, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if name == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            # i18n: column positioning for "hg log"
            self.ui.write(_("phase: %s\n") % ctx.phasestr(),
                          label='log.phase')
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            # i18n: column positioning for "hg log"
            self.ui.write(_("parent: %d:%s\n")
                          % (pctx.rev(), hexfunc(pctx.node())),
                          label=label)

        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            # i18n: column positioning for "hg log"
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifest.rev(mnode), hex(mnode)),
                          label='ui.debug log.manifest')
        # i18n: column positioning for "hg log"
        self.ui.write(_("user: %s\n") % ctx.user(),
                      label='log.user')
        # i18n: column positioning for "hg log"
        self.ui.write(_("date: %s\n") % date,
                      label='log.date')

        if self.ui.debugflag:
            # debug: show modified/added/removed file lists separately
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip([# i18n: column positioning for "hg log"
                                   _("files:"),
                                   # i18n: column positioning for "hg log"
                                   _("files+:"),
                                   # i18n: column positioning for "hg log"
                                   _("files-:")], files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            # i18n: column positioning for "hg log"
            self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            # i18n: column positioning for "hg log"
            self.ui.write(_("copies: %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                # i18n: column positioning for "hg log"
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # i18n: column positioning for "hg log"
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(ctx, matchfn)

    def showpatch(self, ctx, matchfn):
        # emit diffstat and/or diff for ctx according to self.diffopts
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            node = ctx.node()
            prev = ctx.p1().node()
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                if stat:
                    # blank line between diffstat and the diff itself
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
            self.ui.write("\n")
1350 1350
class jsonchangeset(changeset_printer):
    '''format changeset information.

    Emits a JSON list with one object per changeset; the amount of
    detail follows the ui verbosity (quiet/normal/verbose/debug).
    '''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        # whether the opening "[" still needs to be written
        self._first = True

    def close(self):
        # terminate the JSON document; an empty run yields "[]"
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        if rev is None:
            # working directory revision: encode rev/node as JSON null
            jrev = jnode = 'null'
        else:
            jrev = str(rev)
            jnode = '"%s"' % hex(ctx.node())
        j = encoding.jsonescape

        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            # separator between consecutive changeset objects
            self.ui.write(",\n {")

        if self.ui.quiet:
            self.ui.write('\n "rev": %s' % jrev)
            self.ui.write(',\n "node": %s' % jnode)
            self.ui.write('\n }')
            return

        self.ui.write('\n "rev": %s' % jrev)
        self.ui.write(',\n "node": %s' % jnode)
        self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
        self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
        self.ui.write(',\n "user": "%s"' % j(ctx.user()))
        self.ui.write(',\n "date": [%d, %d]' % ctx.date())
        self.ui.write(',\n "desc": "%s"' % j(ctx.description()))

        self.ui.write(',\n "bookmarks": [%s]' %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write(',\n "tags": [%s]' %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write(',\n "parents": [%s]' %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            self.ui.write(',\n "manifest": %s' % jmanifestnode)

            self.ui.write(',\n "extra": {%s}' %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            # status relative to the first parent
            files = ctx.p1().status(ctx)
            self.ui.write(',\n "modified": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write(',\n "added": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write(',\n "removed": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            self.ui.write(',\n "files": [%s]' %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                self.ui.write(',\n "copies": {%s}' %
                              ", ".join('"%s": "%s"' % (j(k), j(v))
                                        for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                # render the diffstat into a buffer so it can be escaped
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))

        self.ui.write("\n }")
1448 1448
class changeset_templater(changeset_printer):
    '''format changeset information.

    Renders changesets through the templater, using either an inline
    template string or a style map file.
    '''

    def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        # shorten node hashes unless --debug asked for the full hash
        formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
        defaulttempl = {
            'parent': '{rev}:{node|formatnode} ',
            'manifest': '{rev}:{node|formatnode}',
            'file_copy': '{name} ({source})',
            'extra': '{key}={value|stringescape}'
            }
        # filecopy is preserved for compatibility reasons
        defaulttempl['filecopy'] = defaulttempl['file_copy']
        self.t = templater.templater(mapfile, {'formatnode': formatnode},
                                     cache=defaulttempl)
        if tmpl:
            # an explicit template overrides any 'changeset' map entry
            self.t.cache['changeset'] = tmpl

        self.cache = {}

        # find correct templates for current mode
        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        # later (more specific) matching modes override earlier ones
        self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
                       'docheader': '', 'docfooter': ''}
        for mode, postfix in tmplmodes:
            for t in self._parts:
                cur = t
                if postfix:
                    cur += "_" + postfix
                if mode and cur in self.t:
                    self._parts[t] = cur

        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        props = props.copy()
        props.update(templatekw.keywords)
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        # expose ui so labels work with templated output (e.g. color)
        props['ui'] = self.repo.ui
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache

        try:
            # write header
            if self._parts['header']:
                h = templater.stringify(self.t(self._parts['header'], **props))
                if self.buffered:
                    self.header[ctx.rev()] = h
                else:
                    if self.lastheader != h:
                        self.lastheader = h
                        self.ui.write(h)

            # write changeset metadata, then patch if requested
            key = self._parts['changeset']
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(ctx, matchfn)

            if self._parts['footer']:
                if not self.footer:
                    self.footer = templater.stringify(
                        self.t(self._parts['footer'], **props))
        except KeyError as inst:
            msg = _("%s: no key named '%s'")
            raise error.Abort(msg % (self.t.mapfile, inst.args[0]))
        except SyntaxError as inst:
            raise error.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1533 1534
def gettemplate(ui, tmpl, style):
    """Resolve a template spec or style name to (template, mapfile).

    Returns a pair of which normally at most one element is set: the
    template text itself, or the path of a style map file.
    """

    # Neither given explicitly: fall back to ui configuration, where a
    # configured logtemplate is stronger than a configured style.
    if not (tmpl or style):
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            try:
                tmpl = templater.unquotestring(tmpl)
            except SyntaxError:
                pass
            return tmpl, None
        style = util.expandpath(ui.config('ui', 'style', ''))

    if style and not tmpl:
        mapfile = style
        # a bare name refers to a shipped map-cmdline.<name> style file
        if not os.path.split(mapfile)[0]:
            mapname = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if mapname:
                mapfile = mapname
        return None, mapfile

    if not tmpl:
        return None, None

    return formatter.lookuptemplate(ui, 'changeset', tmpl)
1564 1565
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # a matcher is only needed when a patch or diffstat was requested
    if opts.get('patch') or opts.get('stat'):
        matchfn = scmutil.matchall(repo)
    else:
        matchfn = None

    # 'json' is handled by a dedicated printer, not the templater
    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, matchfn, opts, buffered)

    tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))

    if not (tmpl or mapfile):
        return changeset_printer(ui, repo, matchfn, opts, buffered)

    try:
        return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
                                   buffered)
    except SyntaxError as inst:
        raise error.Abort(inst.args[0])
1595 1596
def showmarker(ui, marker):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    # precursor node first, then each successor separated by spaces
    ui.write(hex(marker.precnode()))
    for successor in marker.succnodes():
        ui.write(' ')
        ui.write(hex(successor))
    ui.write(' %X ' % marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
    ui.write('(%s) ' % util.datestr(marker.date()))
    # remaining metadata; the date was already shown above
    meta = ', '.join('%r: %r' % item
                     for item in sorted(marker.metadata().items())
                     if item[0] != 'date')
    ui.write('{%s}' % meta)
    ui.write('\n')
1613 1614
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    dateok = util.matchdate(date)
    matcher = scmutil.matchall(repo)
    candidates = {}

    def prep(ctx, fns):
        # called for every walked changeset; remember those whose commit
        # timestamp satisfies the date spec
        when = ctx.date()
        if dateok(when[0]):
            candidates[ctx.rev()] = when

    for ctx in walkchangerevs(repo, matcher, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in candidates:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(candidates[rev])))
            return str(rev)

    raise error.Abort(_("revision matching date not found"))
1634 1635
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes forever, doubling until sizelimit is reached.

    Produces windowsize, 2*windowsize, 4*windowsize, ... and then keeps
    repeating the first value that is >= sizelimit indefinitely.
    """
    current = windowsize
    while True:
        yield current
        if current < sizelimit:
            current = current * 2
1640 1641
class FileWalkError(Exception):
    """Raised when file history cannot be walked via filelogs alone."""
1643 1644
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.

    :repo: the repository to walk
    :match: matcher naming the files of interest (files only, no patterns)
    :follow: whether to follow copies/renames backwards
    :revs: revisions constraining the walk (min/max bound the linkrevs)
    :fncache: dict filled as a side effect, rev -> list of matched files
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yield (filename, filenode) pairs for the matched files, then for
        # any copy sources discovered while walking (appended to 'copies')
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = set([filelog.linkrev(last)])

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
1740 1741
class _followfilter(object):
    """Decide whether revisions belong to the followed line of history.

    The first revision passed to match() becomes the starting point;
    subsequent revisions are accepted when they are descendants (for revs
    above the start) or ancestors (for revs below it), tracked via the
    incrementally-updated self.roots set.
    """

    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        # when onlyfirst is set, only first parents are considered
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            # parents of rev, restricted to first parent if requested,
            # with null parents filtered out
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            # first revision seen defines the starting point
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
1778 1779
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    # the slow path (scanning every changeset's file list) is needed when
    # patterns are involved, or --removed is combined with plain paths
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    # membership test doubles as lazy evaluation: compute
                    # the match once, remember it, and populate fncache
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                # NOTE(review): set-minus-list works for smartsets (their
                # __sub__ filters by membership), but would raise TypeError
                # if 'wanted' is a plain set() or a lazywantedset here --
                # confirm which types can actually reach this point
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            # forward pass: collect the wanted revisions in this window
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # call prepare() on each context in forward (ascending) order
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # yield contexts in the caller's requested order
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
1916 1917
def _makefollowlogfilematcher(repo, files, followfirst):
    # With --patch --follow FILE we must know, for every displayed
    # revision, which file of that revision to diff. We rebuild the
    # linkrev -> file-name mapping by replaying the same traversal the
    # --follow revset performs over the ancestors of FILE (not strictly
    # "correct", but good enough), and cache it in 'linkrevfiles'.
    linkrevfiles = {}
    populated = [False]
    parentctx = repo['.']

    def _fillcache():
        for fn in files:
            fctx = parentctx[fn]
            linkrevfiles.setdefault(fctx.linkrev(), set()).add(fctx.path())
            for c in fctx.ancestors(followfirst=followfirst):
                linkrevfiles.setdefault(c.linkrev(), set()).add(c.path())

    def filematcher(rev):
        if not populated[0]:
            # fill the cache lazily, on first use
            populated[0] = True
            _fillcache()
        return scmutil.matchfiles(repo, linkrevfiles.get(rev, []))

    return filematcher
1943 1944
1944 1945 def _makenofollowlogfilematcher(repo, pats, opts):
1945 1946 '''hook for extensions to override the filematcher for non-follow cases'''
1946 1947 return None
1947 1948
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # map of log option name -> (revset template, join operator used when
    # the option's value is a list)
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    # use the next() builtin (not the py2-only it.next() method), matching
    # the next(it, ...) calls elsewhere in this module
    startrev = next(it)
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

        # We decided to fall back to the slowpath because at least one
        # of the paths was not a file. Check to see if at least one of them
        # existed in history - in that case, we'll continue down the
        # slowpath; otherwise, we can turn off the slowpath
        if slowpath:
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    # translate the collected options into a single revset expression
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2099 2100
def _logrevs(repo, opts):
    """Return the default set of revisions 'log' should consider."""
    # --rev and --follow interact: the default revision set depends on
    # --follow, while --follow's behavior depends on the resolved revisions.
    following = opts.get('follow') or opts.get('follow_first')
    userrevs = opts.get('rev')
    if userrevs:
        return scmutil.revrange(repo, userrevs)
    if following:
        if repo.dirstate.p1() == nullid:
            # working directory parent is the null revision: nothing to follow
            return revset.baseset()
        return repo.revs('reverse(:.)')
    allrevs = revset.spanset(repo)
    allrevs.reverse()
    return allrevs
2114 2115
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        revs.sort(reverse=True)
    if expr:
        # Filters tend to run faster over changelog-ordered revisions,
        # so evaluate the revset in ascending order...
        revs.reverse()
        matcher = revset.match(repo.ui, expr)
        revs = matcher(repo, revs)
        # ...and restore descending display order afterwards ("A or B"
        # style revsets may also have reordered the revisions).
        revs.sort(reverse=True)
    if limit is not None:
        taken = []
        for rev in revs:
            if len(taken) >= limit:
                break
            taken.append(rev)
        revs = revset.baseset(taken)

    return revs, expr, filematcher
2151 2152
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        userordered = bool(opts.get('rev'))
        if not userordered:
            # changelog order is usually faster for revset filters
            revs.reverse()
        matcher = revset.match(repo.ui, expr)
        unfiltered = revs
        revs = matcher(repo, revs)
        if not userordered:
            # restore descending display order ("A or B" style revsets
            # may have reordered the revisions)
            revs.sort(reverse=True)
        else:
            multivalued = ['branch', 'only_branch', 'keyword', 'user']
            if (len(pats) > 1
                or any(len(opts.get(op, [])) > 1 for op in multivalued)):
                # XXX "A or B" is known to change the order; fix it by
                # filtering the matched set against the original one again
                # (issue5100)
                revs = unfiltered & revs
    if limit is not None:
        picked = []
        for count, r in enumerate(revs):
            if limit <= count:
                break
            picked.append(r)
        revs = revset.baseset(picked)

    return revs, expr, filematcher
2192 2193
def _graphnodeformatter(ui, displayer):
    """Return a callable mapping (repo, ctx) to the graph node string."""
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        # no template configured: the stock "{graphnode}" keyword suffices
        return templatekw.showgraphnode  # fast path for "{graphnode}"

    templ = formatter.gettemplater(ui, 'graphnode', spec)
    if isinstance(displayer, changeset_templater):
        # share the displayer's cache of slow templates
        cache = displayer.cache
    else:
        cache = {}
    props = templatekw.keywords.copy()
    props['templ'] = templ
    props['cache'] = cache

    def formatnode(repo, ctx):
        props['ctx'] = ctx
        props['repo'] = repo
        props['revcache'] = {}
        return templater.stringify(templ('graphnode', **props))

    return formatnode
2211 2212
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None):
    """Render the revisions of 'dag' as an ASCII graph via 'displayer'."""
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    for rev, kind, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        if getrenamed and ctx.rev():
            # collect (new, old) rename pairs for this revision
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = filematcher(ctx.rev()) if filematcher is not None else None
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            # drop the trailing empty line produced by the final '\n'
            del lines[-1]
        displayer.flush(ctx)
        for etype, echar, elines, coldata in edgefn(kind, char, lines, state,
                                                    rev, parents):
            graphmod.ascii(ui, state, etype, echar, elines, coldata)
    displayer.close()
2237 2238
def graphlog(ui, repo, *pats, **opts):
    """Show revision history alongside an ASCII revision graph.

    Parameters are identical to log command ones.
    """
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        endrev = None
        userrevs = opts.get('rev')
        if userrevs:
            # only look for renames up to the newest requested revision
            endrev = scmutil.revrange(repo, userrevs).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
                 filematcher)
2252 2253
def checkunsupportedgraphflags(pats, opts):
    """Abort when an option that -G/--graph cannot honor was supplied."""
    for badopt in ["newest_first"]:
        if opts.get(badopt):
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % badopt.replace("_", "-"))
2258 2259
def graphrevs(repo, nodes, opts):
    """Return a graphmod node iterator over 'nodes', newest first,
    honoring --limit. Note: reverses the caller's list in place."""
    limit = loglimit(opts)
    nodes.reverse()
    shown = nodes if limit is None else nodes[:limit]
    return graphmod.nodes(repo, shown)
2265 2266
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule the files matched by 'match' (and in subrepos) for addition.

    Returns the list of file names that could not be added.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []

    # collect names rejected by the matcher while still reporting them
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        # auditor flags file names that collide case-insensitively
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
                                  True, False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    # recurse into subrepos; without --subrepos only explicit matches are added
    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get('subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
2308 2309
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking the matched files without removing them from disk.

    Returns a pair (bad, forgot): names that could not be forgotten and
    names (including subrepo-prefixed ones) that were.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # collect names rejected by the matcher while still reporting them
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    # status tuple indices: 0=modified, 1=added, 3=deleted, 6=clean
    forget = sorted(s[0] + s[1] + s[3] + s[6])
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # warn about explicitly named files that are not tracked
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2356 2357
def files(ui, ctx, m, fm, fmt, subrepos):
    """Write the names of files in ctx matched by m through formatter fm.

    Returns 0 if at least one file was listed, 1 otherwise.
    """
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        if rev is None and ds[f] == 'r':
            # marked removed in the working directory: not listed
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    # hoisted out of the loop below: the predicate only depends on its
    # argument, so there is no reason to redefine it per iteration
    def matchessubrepo(subpath):
        # a subrepo is visited when the match is unrestricted, names the
        # subrepo exactly, or names a file inside it
        return (m.always() or m.exact(subpath)
                or any(f.startswith(subpath + '/') for f in m.files()))

    for subpath in sorted(ctx.substate):
        if subrepos or matchessubrepo(subpath):
            sub = ctx.sub(subpath)
            try:
                submatch = matchmod.subdirmatcher(subpath, m)
                if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret
2389 2390
def remove(ui, repo, m, prefix, after, force, subrepos):
    """Untrack (and unless 'after', unlink) the files matched by m.

    after: only record deletions that already happened on disk
    force: remove even modified/added/clean files
    Returns 0 on success, 1 if any file had to be skipped or warned about.
    """
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    # hoisted out of the subrepo loop: the predicate does not depend on any
    # per-iteration state, so defining it once avoids re-creating it
    def matchessubrepo(matcher, subpath):
        if matcher.exact(subpath):
            return True
        for f in matcher.files():
            if f.startswith(subpath):
                return True
        return False

    for subpath in sorted(wctx.substate):
        if subrepos or matchessubrepo(m, subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.subdirmatcher(subpath, m)
                if sub.removefiles(submatch, prefix, after, force, subrepos):
                    ret = 1
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % join(subpath))

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)

    # hoisted out of the loop below; takes the file name explicitly instead
    # of closing over the loop variable
    def insubrepo(f):
        for subpath in wctx.substate:
            if f.startswith(subpath):
                return True
        return False

    for f in m.files():
        isdir = f in deleteddirs or wctx.hasdir(f)
        if f in repo.dirstate or isdir or f == '.' or insubrepo(f):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                ui.warn(_('not removing %s: no tracked files\n')
                        % m.rel(f))
            else:
                ui.warn(_('not removing %s: file is untracked\n')
                        % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1

    # 'removelist' (renamed from 'list', which shadowed the builtin) holds
    # the names that will actually be forgotten/unlinked
    if force:
        removelist = modified + deleted + clean + added
    elif after:
        removelist = deleted
        for f in modified + added + clean:
            ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
            ret = 1
    else:
        removelist = deleted + clean
        for f in modified:
            ui.warn(_('not removing %s: file is modified (use -f'
                      ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            ui.warn(_('not removing %s: file has been marked for add'
                      ' (use forget to undo)\n') % m.rel(f))
            ret = 1

    for f in sorted(removelist):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    with repo.wlock():
        if not after:
            for f in removelist:
                if f in added:
                    continue # we never unlink added files on remove
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        repo[None].forget(removelist)

    return ret
2471 2472
def cat(ui, repo, ctx, matcher, prefix, **opts):
    """Write out the data of the files in ctx matched by matcher.

    Returns 0 if at least one file was written, 1 otherwise.
    """
    err = 1

    def write(path):
        # write path's data to the file object makefileobj derives from
        # opts['output'] (presumably stdout when no --output is given --
        # makefileobj is defined elsewhere in this module)
        fp = makefileobj(repo, opts.get('output'), ctx.node(),
                         pathname=os.path.join(prefix, path))
        data = ctx[path].data()
        if opts.get('decode'):
            data = repo.wwritedata(path, data)
        fp.write(data)
        fp.close()

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mf = repo.manifest
        mfnode = ctx.manifestnode()
        if mfnode and mf.find(mfnode, file)[0]:
            write(file)
            return 0

    # Don't warn about "missing" files that are really in subrepos
    def badfn(path, msg):
        for subpath in ctx.substate:
            if path.startswith(subpath):
                return
        matcher.bad(path, msg)

    for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
        write(abs)
        err = 0

    # recurse into subrepos; a subrepo cat returning falsy means success
    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, os.path.join(prefix, sub._path),
                           **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2518 2519
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get('date')
    if rawdate:
        # normalize the user-supplied date before passing opts along
        opts['date'] = util.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a
    # command that doesn't support addremove
    addremove = opts.get('addremove')
    if addremove and scmutil.addremove(repo, matcher, "", opts) != 0:
        raise error.Abort(
            _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)
2535 2536
2536 2537 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2537 2538 # avoid cycle context -> subrepo -> cmdutil
2538 2539 from . import context
2539 2540
2540 2541 # amend will reuse the existing user if not specified, but the obsolete
2541 2542 # marker creation requires that the current user's name is specified.
2542 2543 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2543 2544 ui.username() # raise exception if username not set
2544 2545
2545 2546 ui.note(_('amending changeset %s\n') % old)
2546 2547 base = old.p1()
2547 2548 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2548 2549
2549 2550 wlock = lock = newid = None
2550 2551 try:
2551 2552 wlock = repo.wlock()
2552 2553 lock = repo.lock()
2553 2554 with repo.transaction('amend') as tr:
2554 2555 # See if we got a message from -m or -l, if not, open the editor
2555 2556 # with the message of the changeset to amend
2556 2557 message = logmessage(ui, opts)
2557 2558 # ensure logfile does not conflict with later enforcement of the
2558 2559 # message. potential logfile content has been processed by
2559 2560 # `logmessage` anyway.
2560 2561 opts.pop('logfile')
2561 2562 # First, do a regular commit to record all changes in the working
2562 2563 # directory (if there are any)
2563 2564 ui.callhooks = False
2564 2565 activebookmark = repo._bookmarks.active
2565 2566 try:
2566 2567 repo._bookmarks.active = None
2567 2568 opts['message'] = 'temporary amend commit for %s' % old
2568 2569 node = commit(ui, repo, commitfunc, pats, opts)
2569 2570 finally:
2570 2571 repo._bookmarks.active = activebookmark
2571 2572 repo._bookmarks.recordchange(tr)
2572 2573 ui.callhooks = True
2573 2574 ctx = repo[node]
2574 2575
2575 2576 # Participating changesets:
2576 2577 #
2577 2578 # node/ctx o - new (intermediate) commit that contains changes
2578 2579 # | from working dir to go into amending commit
2579 2580 # | (or a workingctx if there were no changes)
2580 2581 # |
2581 2582 # old o - changeset to amend
2582 2583 # |
2583 2584 # base o - parent of amending changeset
2584 2585
2585 2586 # Update extra dict from amended commit (e.g. to preserve graft
2586 2587 # source)
2587 2588 extra.update(old.extra())
2588 2589
2589 2590 # Also update it from the intermediate commit or from the wctx
2590 2591 extra.update(ctx.extra())
2591 2592
2592 2593 if len(old.parents()) > 1:
2593 2594 # ctx.files() isn't reliable for merges, so fall back to the
2594 2595 # slower repo.status() method
2595 2596 files = set([fn for st in repo.status(base, old)[:3]
2596 2597 for fn in st])
2597 2598 else:
2598 2599 files = set(old.files())
2599 2600
2600 2601 # Second, we use either the commit we just did, or if there were no
2601 2602 # changes the parent of the working directory as the version of the
2602 2603 # files in the final amend commit
2603 2604 if node:
2604 2605 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2605 2606
2606 2607 user = ctx.user()
2607 2608 date = ctx.date()
2608 2609 # Recompute copies (avoid recording a -> b -> a)
2609 2610 copied = copies.pathcopies(base, ctx)
2610 2611 if old.p2:
2611 2612 copied.update(copies.pathcopies(old.p2(), ctx))
2612 2613
2613 2614 # Prune files which were reverted by the updates: if old
2614 2615 # introduced file X and our intermediate commit, node,
2615 2616 # renamed that file, then those two files are the same and
2616 2617 # we can discard X from our list of files. Likewise if X
2617 2618 # was deleted, it's no longer relevant
2618 2619 files.update(ctx.files())
2619 2620
2620 2621 def samefile(f):
2621 2622 if f in ctx.manifest():
2622 2623 a = ctx.filectx(f)
2623 2624 if f in base.manifest():
2624 2625 b = base.filectx(f)
2625 2626 return (not a.cmp(b)
2626 2627 and a.flags() == b.flags())
2627 2628 else:
2628 2629 return False
2629 2630 else:
2630 2631 return f not in base.manifest()
2631 2632 files = [f for f in files if not samefile(f)]
2632 2633
2633 2634 def filectxfn(repo, ctx_, path):
2634 2635 try:
2635 2636 fctx = ctx[path]
2636 2637 flags = fctx.flags()
2637 2638 mctx = context.memfilectx(repo,
2638 2639 fctx.path(), fctx.data(),
2639 2640 islink='l' in flags,
2640 2641 isexec='x' in flags,
2641 2642 copied=copied.get(path))
2642 2643 return mctx
2643 2644 except KeyError:
2644 2645 return None
2645 2646 else:
2646 2647 ui.note(_('copying changeset %s to %s\n') % (old, base))
2647 2648
2648 2649 # Use version of files as in the old cset
2649 2650 def filectxfn(repo, ctx_, path):
2650 2651 try:
2651 2652 return old.filectx(path)
2652 2653 except KeyError:
2653 2654 return None
2654 2655
2655 2656 user = opts.get('user') or old.user()
2656 2657 date = opts.get('date') or old.date()
2657 2658 editform = mergeeditform(old, 'commit.amend')
2658 2659 editor = getcommiteditor(editform=editform, **opts)
2659 2660 if not message:
2660 2661 editor = getcommiteditor(edit=True, editform=editform)
2661 2662 message = old.description()
2662 2663
2663 2664 pureextra = extra.copy()
2664 2665 extra['amend_source'] = old.hex()
2665 2666
2666 2667 new = context.memctx(repo,
2667 2668 parents=[base.node(), old.p2().node()],
2668 2669 text=message,
2669 2670 files=files,
2670 2671 filectxfn=filectxfn,
2671 2672 user=user,
2672 2673 date=date,
2673 2674 extra=extra,
2674 2675 editor=editor)
2675 2676
2676 2677 newdesc = changelog.stripdesc(new.description())
2677 2678 if ((not node)
2678 2679 and newdesc == old.description()
2679 2680 and user == old.user()
2680 2681 and date == old.date()
2681 2682 and pureextra == old.extra()):
2682 2683 # nothing changed. continuing here would create a new node
2683 2684 # anyway because of the amend_source noise.
2684 2685 #
2685 2686 # This not what we expect from amend.
2686 2687 return old.node()
2687 2688
2688 2689 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2689 2690 try:
2690 2691 if opts.get('secret'):
2691 2692 commitphase = 'secret'
2692 2693 else:
2693 2694 commitphase = old.phase()
2694 2695 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2695 2696 newid = repo.commitctx(new)
2696 2697 finally:
2697 2698 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2698 2699 if newid != old.node():
2699 2700 # Reroute the working copy parent to the new changeset
2700 2701 repo.setparents(newid, nullid)
2701 2702
2702 2703 # Move bookmarks from old parent to amend commit
2703 2704 bms = repo.nodebookmarks(old.node())
2704 2705 if bms:
2705 2706 marks = repo._bookmarks
2706 2707 for bm in bms:
2707 2708 ui.debug('moving bookmarks %r from %s to %s\n' %
2708 2709 (marks, old.hex(), hex(newid)))
2709 2710 marks[bm] = newid
2710 2711 marks.recordchange(tr)
2711 2712 #commit the whole amend process
2712 2713 if createmarkers:
2713 2714 # mark the new changeset as successor of the rewritten one
2714 2715 new = repo[newid]
2715 2716 obs = [(old, (new,))]
2716 2717 if node:
2717 2718 obs.append((ctx, ()))
2718 2719
2719 2720 obsolete.createmarkers(repo, obs)
2720 2721 if not createmarkers and newid != old.node():
2721 2722 # Strip the intermediate commit (if there was one) and the amended
2722 2723 # commit
2723 2724 if node:
2724 2725 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2725 2726 ui.note(_('stripping amended changeset %s\n') % old)
2726 2727 repair.strip(ui, repo, old.node(), topic='amend-backup')
2727 2728 finally:
2728 2729 lockmod.release(lock, wlock)
2729 2730 return newid
2730 2731
2731 2732 def commiteditor(repo, ctx, subs, editform=''):
2732 2733 if ctx.description():
2733 2734 return ctx.description()
2734 2735 return commitforceeditor(repo, ctx, subs, editform=editform,
2735 2736 unchangedmessagedetection=True)
2736 2737
2737 2738 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2738 2739 editform='', unchangedmessagedetection=False):
2739 2740 if not extramsg:
2740 2741 extramsg = _("Leave message empty to abort commit.")
2741 2742
2742 2743 forms = [e for e in editform.split('.') if e]
2743 2744 forms.insert(0, 'changeset')
2744 2745 templatetext = None
2745 2746 while forms:
2746 2747 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2747 2748 if tmpl:
2748 2749 templatetext = committext = buildcommittemplate(
2749 2750 repo, ctx, subs, extramsg, tmpl)
2750 2751 break
2751 2752 forms.pop()
2752 2753 else:
2753 2754 committext = buildcommittext(repo, ctx, subs, extramsg)
2754 2755
2755 2756 # run editor in the repository root
2756 2757 olddir = os.getcwd()
2757 2758 os.chdir(repo.root)
2758 2759
2759 2760 # make in-memory changes visible to external process
2760 2761 tr = repo.currenttransaction()
2761 2762 repo.dirstate.write(tr)
2762 2763 pending = tr and tr.writepending() and repo.root
2763 2764
2764 2765 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2765 2766 editform=editform, pending=pending)
2766 2767 text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
2767 2768 os.chdir(olddir)
2768 2769
2769 2770 if finishdesc:
2770 2771 text = finishdesc(text)
2771 2772 if not text.strip():
2772 2773 raise error.Abort(_("empty commit message"))
2773 2774 if unchangedmessagedetection and editortext == templatetext:
2774 2775 raise error.Abort(_("commit message unchanged"))
2775 2776
2776 2777 return text
2777 2778
2778 2779 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2779 2780 ui = repo.ui
2780 2781 tmpl, mapfile = gettemplate(ui, tmpl, None)
2781 2782
2782 2783 try:
2783 2784 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2784 2785 except SyntaxError as inst:
2785 2786 raise error.Abort(inst.args[0])
2786 2787
2787 2788 for k, v in repo.ui.configitems('committemplate'):
2788 2789 if k != 'changeset':
2789 2790 t.t.cache[k] = v
2790 2791
2791 2792 if not extramsg:
2792 2793 extramsg = '' # ensure that extramsg is string
2793 2794
2794 2795 ui.pushbuffer()
2795 2796 t.show(ctx, extramsg=extramsg)
2796 2797 return ui.popbuffer()
2797 2798
2798 2799 def hgprefix(msg):
2799 2800 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2800 2801
2801 2802 def buildcommittext(repo, ctx, subs, extramsg):
2802 2803 edittext = []
2803 2804 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2804 2805 if ctx.description():
2805 2806 edittext.append(ctx.description())
2806 2807 edittext.append("")
2807 2808 edittext.append("") # Empty line between message and comments.
2808 2809 edittext.append(hgprefix(_("Enter commit message."
2809 2810 " Lines beginning with 'HG:' are removed.")))
2810 2811 edittext.append(hgprefix(extramsg))
2811 2812 edittext.append("HG: --")
2812 2813 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2813 2814 if ctx.p2():
2814 2815 edittext.append(hgprefix(_("branch merge")))
2815 2816 if ctx.branch():
2816 2817 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2817 2818 if bookmarks.isactivewdirparent(repo):
2818 2819 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2819 2820 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2820 2821 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2821 2822 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2822 2823 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2823 2824 if not added and not modified and not removed:
2824 2825 edittext.append(hgprefix(_("no files changed")))
2825 2826 edittext.append("")
2826 2827
2827 2828 return "\n".join(edittext)
2828 2829
2829 2830 def commitstatus(repo, node, branch, bheads=None, opts=None):
2830 2831 if opts is None:
2831 2832 opts = {}
2832 2833 ctx = repo[node]
2833 2834 parents = ctx.parents()
2834 2835
2835 2836 if (not opts.get('amend') and bheads and node not in bheads and not
2836 2837 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2837 2838 repo.ui.status(_('created new head\n'))
2838 2839 # The message is not printed for initial roots. For the other
2839 2840 # changesets, it is printed in the following situations:
2840 2841 #
2841 2842 # Par column: for the 2 parents with ...
2842 2843 # N: null or no parent
2843 2844 # B: parent is on another named branch
2844 2845 # C: parent is a regular non head changeset
2845 2846 # H: parent was a branch head of the current branch
2846 2847 # Msg column: whether we print "created new head" message
2847 2848 # In the following, it is assumed that there already exists some
2848 2849 # initial branch heads of the current branch, otherwise nothing is
2849 2850 # printed anyway.
2850 2851 #
2851 2852 # Par Msg Comment
2852 2853 # N N y additional topo root
2853 2854 #
2854 2855 # B N y additional branch root
2855 2856 # C N y additional topo head
2856 2857 # H N n usual case
2857 2858 #
2858 2859 # B B y weird additional branch root
2859 2860 # C B y branch merge
2860 2861 # H B n merge with named branch
2861 2862 #
2862 2863 # C C y additional head from merge
2863 2864 # C H n merge with a head
2864 2865 #
2865 2866 # H H n head merge: head count decreases
2866 2867
2867 2868 if not opts.get('close_branch'):
2868 2869 for r in parents:
2869 2870 if r.closesbranch() and r.branch() == branch:
2870 2871 repo.ui.status(_('reopening closed branch head %d\n') % r)
2871 2872
2872 2873 if repo.ui.debugflag:
2873 2874 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2874 2875 elif repo.ui.verbose:
2875 2876 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2876 2877
2877 2878 def postcommitstatus(repo, pats, opts):
2878 2879 return repo.status(match=scmutil.match(repo[None], pats, opts))
2879 2880
2880 2881 def revert(ui, repo, ctx, parents, *pats, **opts):
2881 2882 parent, p2 = parents
2882 2883 node = ctx.node()
2883 2884
2884 2885 mf = ctx.manifest()
2885 2886 if node == p2:
2886 2887 parent = p2
2887 2888
2888 2889 # need all matching names in dirstate and manifest of target rev,
2889 2890 # so have to walk both. do not print errors if files exist in one
2890 2891 # but not other. in both cases, filesets should be evaluated against
2891 2892 # workingctx to get consistent result (issue4497). this means 'set:**'
2892 2893 # cannot be used to select missing files from target rev.
2893 2894
2894 2895 # `names` is a mapping for all elements in working copy and target revision
2895 2896 # The mapping is in the form:
2896 2897 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2897 2898 names = {}
2898 2899
2899 2900 with repo.wlock():
2900 2901 ## filling of the `names` mapping
2901 2902 # walk dirstate to fill `names`
2902 2903
2903 2904 interactive = opts.get('interactive', False)
2904 2905 wctx = repo[None]
2905 2906 m = scmutil.match(wctx, pats, opts)
2906 2907
2907 2908 # we'll need this later
2908 2909 targetsubs = sorted(s for s in wctx.substate if m(s))
2909 2910
2910 2911 if not m.always():
2911 2912 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2912 2913 names[abs] = m.rel(abs), m.exact(abs)
2913 2914
2914 2915 # walk target manifest to fill `names`
2915 2916
2916 2917 def badfn(path, msg):
2917 2918 if path in names:
2918 2919 return
2919 2920 if path in ctx.substate:
2920 2921 return
2921 2922 path_ = path + '/'
2922 2923 for f in names:
2923 2924 if f.startswith(path_):
2924 2925 return
2925 2926 ui.warn("%s: %s\n" % (m.rel(path), msg))
2926 2927
2927 2928 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2928 2929 if abs not in names:
2929 2930 names[abs] = m.rel(abs), m.exact(abs)
2930 2931
2931 2932 # Find status of all file in `names`.
2932 2933 m = scmutil.matchfiles(repo, names)
2933 2934
2934 2935 changes = repo.status(node1=node, match=m,
2935 2936 unknown=True, ignored=True, clean=True)
2936 2937 else:
2937 2938 changes = repo.status(node1=node, match=m)
2938 2939 for kind in changes:
2939 2940 for abs in kind:
2940 2941 names[abs] = m.rel(abs), m.exact(abs)
2941 2942
2942 2943 m = scmutil.matchfiles(repo, names)
2943 2944
2944 2945 modified = set(changes.modified)
2945 2946 added = set(changes.added)
2946 2947 removed = set(changes.removed)
2947 2948 _deleted = set(changes.deleted)
2948 2949 unknown = set(changes.unknown)
2949 2950 unknown.update(changes.ignored)
2950 2951 clean = set(changes.clean)
2951 2952 modadded = set()
2952 2953
2953 2954 # split between files known in target manifest and the others
2954 2955 smf = set(mf)
2955 2956
2956 2957 # determine the exact nature of the deleted changesets
2957 2958 deladded = _deleted - smf
2958 2959 deleted = _deleted - deladded
2959 2960
2960 2961 # We need to account for the state of the file in the dirstate,
2961 2962 # even when we revert against something else than parent. This will
2962 2963 # slightly alter the behavior of revert (doing back up or not, delete
2963 2964 # or just forget etc).
2964 2965 if parent == node:
2965 2966 dsmodified = modified
2966 2967 dsadded = added
2967 2968 dsremoved = removed
2968 2969 # store all local modifications, useful later for rename detection
2969 2970 localchanges = dsmodified | dsadded
2970 2971 modified, added, removed = set(), set(), set()
2971 2972 else:
2972 2973 changes = repo.status(node1=parent, match=m)
2973 2974 dsmodified = set(changes.modified)
2974 2975 dsadded = set(changes.added)
2975 2976 dsremoved = set(changes.removed)
2976 2977 # store all local modifications, useful later for rename detection
2977 2978 localchanges = dsmodified | dsadded
2978 2979
2979 2980 # only take into account for removes between wc and target
2980 2981 clean |= dsremoved - removed
2981 2982 dsremoved &= removed
2982 2983 # distinct between dirstate remove and other
2983 2984 removed -= dsremoved
2984 2985
2985 2986 modadded = added & dsmodified
2986 2987 added -= modadded
2987 2988
2988 2989 # tell newly modified apart.
2989 2990 dsmodified &= modified
2990 2991 dsmodified |= modified & dsadded # dirstate added may needs backup
2991 2992 modified -= dsmodified
2992 2993
2993 2994 # We need to wait for some post-processing to update this set
2994 2995 # before making the distinction. The dirstate will be used for
2995 2996 # that purpose.
2996 2997 dsadded = added
2997 2998
2998 2999 # in case of merge, files that are actually added can be reported as
2999 3000 # modified, we need to post process the result
3000 3001 if p2 != nullid:
3001 3002 mergeadd = dsmodified - smf
3002 3003 dsadded |= mergeadd
3003 3004 dsmodified -= mergeadd
3004 3005
3005 3006 # if f is a rename, update `names` to also revert the source
3006 3007 cwd = repo.getcwd()
3007 3008 for f in localchanges:
3008 3009 src = repo.dirstate.copied(f)
3009 3010 # XXX should we check for rename down to target node?
3010 3011 if src and src not in names and repo.dirstate[src] == 'r':
3011 3012 dsremoved.add(src)
3012 3013 names[src] = (repo.pathto(src, cwd), True)
3013 3014
3014 3015 # distinguish between file to forget and the other
3015 3016 added = set()
3016 3017 for abs in dsadded:
3017 3018 if repo.dirstate[abs] != 'a':
3018 3019 added.add(abs)
3019 3020 dsadded -= added
3020 3021
3021 3022 for abs in deladded:
3022 3023 if repo.dirstate[abs] == 'a':
3023 3024 dsadded.add(abs)
3024 3025 deladded -= dsadded
3025 3026
3026 3027 # For files marked as removed, we check if an unknown file is present at
3027 3028 # the same path. If a such file exists it may need to be backed up.
3028 3029 # Making the distinction at this stage helps have simpler backup
3029 3030 # logic.
3030 3031 removunk = set()
3031 3032 for abs in removed:
3032 3033 target = repo.wjoin(abs)
3033 3034 if os.path.lexists(target):
3034 3035 removunk.add(abs)
3035 3036 removed -= removunk
3036 3037
3037 3038 dsremovunk = set()
3038 3039 for abs in dsremoved:
3039 3040 target = repo.wjoin(abs)
3040 3041 if os.path.lexists(target):
3041 3042 dsremovunk.add(abs)
3042 3043 dsremoved -= dsremovunk
3043 3044
3044 3045 # action to be actually performed by revert
3045 3046 # (<list of file>, message>) tuple
3046 3047 actions = {'revert': ([], _('reverting %s\n')),
3047 3048 'add': ([], _('adding %s\n')),
3048 3049 'remove': ([], _('removing %s\n')),
3049 3050 'drop': ([], _('removing %s\n')),
3050 3051 'forget': ([], _('forgetting %s\n')),
3051 3052 'undelete': ([], _('undeleting %s\n')),
3052 3053 'noop': (None, _('no changes needed to %s\n')),
3053 3054 'unknown': (None, _('file not managed: %s\n')),
3054 3055 }
3055 3056
3056 3057 # "constant" that convey the backup strategy.
3057 3058 # All set to `discard` if `no-backup` is set do avoid checking
3058 3059 # no_backup lower in the code.
3059 3060 # These values are ordered for comparison purposes
3060 3061 backup = 2 # unconditionally do backup
3061 3062 check = 1 # check if the existing file differs from target
3062 3063 discard = 0 # never do backup
3063 3064 if opts.get('no_backup'):
3064 3065 backup = check = discard
3065 3066
3066 3067 backupanddel = actions['remove']
3067 3068 if not opts.get('no_backup'):
3068 3069 backupanddel = actions['drop']
3069 3070
3070 3071 disptable = (
3071 3072 # dispatch table:
3072 3073 # file state
3073 3074 # action
3074 3075 # make backup
3075 3076
3076 3077 ## Sets that results that will change file on disk
3077 3078 # Modified compared to target, no local change
3078 3079 (modified, actions['revert'], discard),
3079 3080 # Modified compared to target, but local file is deleted
3080 3081 (deleted, actions['revert'], discard),
3081 3082 # Modified compared to target, local change
3082 3083 (dsmodified, actions['revert'], backup),
3083 3084 # Added since target
3084 3085 (added, actions['remove'], discard),
3085 3086 # Added in working directory
3086 3087 (dsadded, actions['forget'], discard),
3087 3088 # Added since target, have local modification
3088 3089 (modadded, backupanddel, backup),
3089 3090 # Added since target but file is missing in working directory
3090 3091 (deladded, actions['drop'], discard),
3091 3092 # Removed since target, before working copy parent
3092 3093 (removed, actions['add'], discard),
3093 3094 # Same as `removed` but an unknown file exists at the same path
3094 3095 (removunk, actions['add'], check),
3095 3096 # Removed since targe, marked as such in working copy parent
3096 3097 (dsremoved, actions['undelete'], discard),
3097 3098 # Same as `dsremoved` but an unknown file exists at the same path
3098 3099 (dsremovunk, actions['undelete'], check),
3099 3100 ## the following sets does not result in any file changes
3100 3101 # File with no modification
3101 3102 (clean, actions['noop'], discard),
3102 3103 # Existing file, not tracked anywhere
3103 3104 (unknown, actions['unknown'], discard),
3104 3105 )
3105 3106
3106 3107 for abs, (rel, exact) in sorted(names.items()):
3107 3108 # target file to be touch on disk (relative to cwd)
3108 3109 target = repo.wjoin(abs)
3109 3110 # search the entry in the dispatch table.
3110 3111 # if the file is in any of these sets, it was touched in the working
3111 3112 # directory parent and we are sure it needs to be reverted.
3112 3113 for table, (xlist, msg), dobackup in disptable:
3113 3114 if abs not in table:
3114 3115 continue
3115 3116 if xlist is not None:
3116 3117 xlist.append(abs)
3117 3118 if dobackup and (backup <= dobackup
3118 3119 or wctx[abs].cmp(ctx[abs])):
3119 3120 bakname = scmutil.origpath(ui, repo, rel)
3120 3121 ui.note(_('saving current version of %s as %s\n') %
3121 3122 (rel, bakname))
3122 3123 if not opts.get('dry_run'):
3123 3124 if interactive:
3124 3125 util.copyfile(target, bakname)
3125 3126 else:
3126 3127 util.rename(target, bakname)
3127 3128 if ui.verbose or not exact:
3128 3129 if not isinstance(msg, basestring):
3129 3130 msg = msg(abs)
3130 3131 ui.status(msg % rel)
3131 3132 elif exact:
3132 3133 ui.warn(msg % rel)
3133 3134 break
3134 3135
3135 3136 if not opts.get('dry_run'):
3136 3137 needdata = ('revert', 'add', 'undelete')
3137 3138 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3138 3139 _performrevert(repo, parents, ctx, actions, interactive)
3139 3140
3140 3141 if targetsubs:
3141 3142 # Revert the subrepos on the revert list
3142 3143 for sub in targetsubs:
3143 3144 try:
3144 3145 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3145 3146 except KeyError:
3146 3147 raise error.Abort("subrepository '%s' does not exist in %s!"
3147 3148 % (sub, short(ctx.node())))
3148 3149
3149 3150 def _revertprefetch(repo, ctx, *files):
3150 3151 """Let extension changing the storage layer prefetch content"""
3151 3152 pass
3152 3153
3153 3154 def _performrevert(repo, parents, ctx, actions, interactive=False):
3154 3155 """function that actually perform all the actions computed for revert
3155 3156
3156 3157 This is an independent function to let extension to plug in and react to
3157 3158 the imminent revert.
3158 3159
3159 3160 Make sure you have the working directory locked when calling this function.
3160 3161 """
3161 3162 parent, p2 = parents
3162 3163 node = ctx.node()
3163 3164 excluded_files = []
3164 3165 matcher_opts = {"exclude": excluded_files}
3165 3166
3166 3167 def checkout(f):
3167 3168 fc = ctx[f]
3168 3169 repo.wwrite(f, fc.data(), fc.flags())
3169 3170
3170 3171 audit_path = pathutil.pathauditor(repo.root)
3171 3172 for f in actions['forget'][0]:
3172 3173 if interactive:
3173 3174 choice = \
3174 3175 repo.ui.promptchoice(
3175 3176 _("forget added file %s (yn)?$$ &Yes $$ &No")
3176 3177 % f)
3177 3178 if choice == 0:
3178 3179 repo.dirstate.drop(f)
3179 3180 else:
3180 3181 excluded_files.append(repo.wjoin(f))
3181 3182 else:
3182 3183 repo.dirstate.drop(f)
3183 3184 for f in actions['remove'][0]:
3184 3185 audit_path(f)
3185 3186 try:
3186 3187 util.unlinkpath(repo.wjoin(f))
3187 3188 except OSError:
3188 3189 pass
3189 3190 repo.dirstate.remove(f)
3190 3191 for f in actions['drop'][0]:
3191 3192 audit_path(f)
3192 3193 repo.dirstate.remove(f)
3193 3194
3194 3195 normal = None
3195 3196 if node == parent:
3196 3197 # We're reverting to our parent. If possible, we'd like status
3197 3198 # to report the file as clean. We have to use normallookup for
3198 3199 # merges to avoid losing information about merged/dirty files.
3199 3200 if p2 != nullid:
3200 3201 normal = repo.dirstate.normallookup
3201 3202 else:
3202 3203 normal = repo.dirstate.normal
3203 3204
3204 3205 newlyaddedandmodifiedfiles = set()
3205 3206 if interactive:
3206 3207 # Prompt the user for changes to revert
3207 3208 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3208 3209 m = scmutil.match(ctx, torevert, matcher_opts)
3209 3210 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3210 3211 diffopts.nodates = True
3211 3212 diffopts.git = True
3212 3213 reversehunks = repo.ui.configbool('experimental',
3213 3214 'revertalternateinteractivemode',
3214 3215 True)
3215 3216 if reversehunks:
3216 3217 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3217 3218 else:
3218 3219 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3219 3220 originalchunks = patch.parsepatch(diff)
3220 3221
3221 3222 try:
3222 3223
3223 3224 chunks, opts = recordfilter(repo.ui, originalchunks)
3224 3225 if reversehunks:
3225 3226 chunks = patch.reversehunks(chunks)
3226 3227
3227 3228 except patch.PatchError as err:
3228 3229 raise error.Abort(_('error parsing patch: %s') % err)
3229 3230
3230 3231 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3231 3232 # Apply changes
3232 3233 fp = cStringIO.StringIO()
3233 3234 for c in chunks:
3234 3235 c.write(fp)
3235 3236 dopatch = fp.tell()
3236 3237 fp.seek(0)
3237 3238 if dopatch:
3238 3239 try:
3239 3240 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3240 3241 except patch.PatchError as err:
3241 3242 raise error.Abort(str(err))
3242 3243 del fp
3243 3244 else:
3244 3245 for f in actions['revert'][0]:
3245 3246 checkout(f)
3246 3247 if normal:
3247 3248 normal(f)
3248 3249
3249 3250 for f in actions['add'][0]:
3250 3251 # Don't checkout modified files, they are already created by the diff
3251 3252 if f not in newlyaddedandmodifiedfiles:
3252 3253 checkout(f)
3253 3254 repo.dirstate.add(f)
3254 3255
3255 3256 normal = repo.dirstate.normallookup
3256 3257 if node == parent and p2 == nullid:
3257 3258 normal = repo.dirstate.normal
3258 3259 for f in actions['undelete'][0]:
3259 3260 checkout(f)
3260 3261 normal(f)
3261 3262
3262 3263 copied = copies.pathcopies(repo[parent], ctx)
3263 3264
3264 3265 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3265 3266 if f in copied:
3266 3267 repo.dirstate.copy(copied[f], f)
3267 3268
3268 3269 def command(table):
3269 3270 """Returns a function object to be used as a decorator for making commands.
3270 3271
3271 3272 This function receives a command table as its argument. The table should
3272 3273 be a dict.
3273 3274
3274 3275 The returned function can be used as a decorator for adding commands
3275 3276 to that command table. This function accepts multiple arguments to define
3276 3277 a command.
3277 3278
3278 3279 The first argument is the command name.
3279 3280
3280 3281 The options argument is an iterable of tuples defining command arguments.
3281 3282 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3282 3283
3283 3284 The synopsis argument defines a short, one line summary of how to use the
3284 3285 command. This shows up in the help output.
3285 3286
3286 3287 The norepo argument defines whether the command does not require a
3287 3288 local repository. Most commands operate against a repository, thus the
3288 3289 default is False.
3289 3290
3290 3291 The optionalrepo argument defines whether the command optionally requires
3291 3292 a local repository.
3292 3293
3293 3294 The inferrepo argument defines whether to try to find a repository from the
3294 3295 command line arguments. If True, arguments will be examined for potential
3295 3296 repository locations. See ``findrepo()``. If a repository is found, it
3296 3297 will be used.
3297 3298 """
3298 3299 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3299 3300 inferrepo=False):
3300 3301 def decorator(func):
3301 3302 func.norepo = norepo
3302 3303 func.optionalrepo = optionalrepo
3303 3304 func.inferrepo = inferrepo
3304 3305 if synopsis:
3305 3306 table[name] = func, list(options), synopsis
3306 3307 else:
3307 3308 table[name] = func, list(options)
3308 3309 return func
3309 3310 return decorator
3310 3311
3311 3312 return cmd
3312 3313
3313 3314 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3314 3315 # commands.outgoing. "missing" is "missing" of the result of
3315 3316 # "findcommonoutgoing()"
3316 3317 outgoinghooks = util.hooks()
3317 3318
3318 3319 # a list of (ui, repo) functions called by commands.summary
3319 3320 summaryhooks = util.hooks()
3320 3321
3321 3322 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3322 3323 #
3323 3324 # functions should return tuple of booleans below, if 'changes' is None:
3324 3325 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3325 3326 #
3326 3327 # otherwise, 'changes' is a tuple of tuples below:
3327 3328 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3328 3329 # - (desturl, destbranch, destpeer, outgoing)
3329 3330 summaryremotehooks = util.hooks()
3330 3331
3331 3332 # A list of state files kept by multistep operations like graft.
3332 3333 # Since graft cannot be aborted, it is considered 'clearable' by update.
3333 3334 # note: bisect is intentionally excluded
3334 3335 # (state file, clearable, allowcommit, error, hint)
3335 3336 unfinishedstates = [
3336 3337 ('graftstate', True, False, _('graft in progress'),
3337 3338 _("use 'hg graft --continue' or 'hg update' to abort")),
3338 3339 ('updatestate', True, False, _('last update was interrupted'),
3339 3340 _("use 'hg update' to get a consistent checkout"))
3340 3341 ]
3341 3342
3342 3343 def checkunfinished(repo, commit=False):
3343 3344 '''Look for an unfinished multistep operation, like graft, and abort
3344 3345 if found. It's probably good to check this right before
3345 3346 bailifchanged().
3346 3347 '''
3347 3348 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3348 3349 if commit and allowcommit:
3349 3350 continue
3350 3351 if repo.vfs.exists(f):
3351 3352 raise error.Abort(msg, hint=hint)
3352 3353
3353 3354 def clearunfinished(repo):
3354 3355 '''Check for unfinished operations (as above), and clear the ones
3355 3356 that are clearable.
3356 3357 '''
3357 3358 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3358 3359 if not clearable and repo.vfs.exists(f):
3359 3360 raise error.Abort(msg, hint=hint)
3360 3361 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3361 3362 if clearable and repo.vfs.exists(f):
3362 3363 util.unlink(repo.join(f))
3363 3364
3364 3365 afterresolvedstates = [
3365 3366 ('graftstate',
3366 3367 _('hg graft --continue')),
3367 3368 ]
3368 3369
3369 3370 def howtocontinue(repo):
3370 3371 '''Check for an unfinished operation and return the command to finish
3371 3372 it.
3372 3373
3373 3374 afterresolvedstates tupples define a .hg/{file} and the corresponding
3374 3375 command needed to finish it.
3375 3376
3376 3377 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3377 3378 a boolean.
3378 3379 '''
3379 3380 contmsg = _("continue: %s")
3380 3381 for f, msg in afterresolvedstates:
3381 3382 if repo.vfs.exists(f):
3382 3383 return contmsg % msg, True
3383 3384 workingctx = repo[None]
3384 3385 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3385 3386 for s in workingctx.substate)
3386 3387 if dirty:
3387 3388 return contmsg % _("hg commit"), False
3388 3389 return None, None
3389 3390
3390 3391 def checkafterresolved(repo):
3391 3392 '''Inform the user about the next action after completing hg resolve
3392 3393
3393 3394 If there's a matching afterresolvedstates, howtocontinue will yield
3394 3395 repo.ui.warn as the reporter.
3395 3396
3396 3397 Otherwise, it will yield repo.ui.note.
3397 3398 '''
3398 3399 msg, warning = howtocontinue(repo)
3399 3400 if msg is not None:
3400 3401 if warning:
3401 3402 repo.ui.warn("%s\n" % msg)
3402 3403 else:
3403 3404 repo.ui.note("%s\n" % msg)
3404 3405
3405 3406 def wrongtooltocontinue(repo, task):
3406 3407 '''Raise an abort suggesting how to properly continue if there is an
3407 3408 active task.
3408 3409
3409 3410 Uses howtocontinue() to find the active task.
3410 3411
3411 3412 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3412 3413 a hint.
3413 3414 '''
3414 3415 after = howtocontinue(repo)
3415 3416 hint = None
3416 3417 if after[1]:
3417 3418 hint = after[0]
3418 3419 raise error.Abort(_('no %s in progress') % task, hint=hint)
3419 3420
3420 3421 class dirstateguard(object):
3421 3422 '''Restore dirstate at unexpected failure.
3422 3423
3423 3424 At the construction, this class does:
3424 3425
3425 3426 - write current ``repo.dirstate`` out, and
3426 3427 - save ``.hg/dirstate`` into the backup file
3427 3428
3428 3429 This restores ``.hg/dirstate`` from backup file, if ``release()``
3429 3430 is invoked before ``close()``.
3430 3431
3431 3432 This just removes the backup file at ``close()`` before ``release()``.
3432 3433 '''
3433 3434
3434 3435 def __init__(self, repo, name):
3435 3436 self._repo = repo
3436 3437 self._suffix = '.backup.%s.%d' % (name, id(self))
3437 3438 repo.dirstate._savebackup(repo.currenttransaction(), self._suffix)
3438 3439 self._active = True
3439 3440 self._closed = False
3440 3441
3441 3442 def __del__(self):
3442 3443 if self._active: # still active
3443 3444 # this may occur, even if this class is used correctly:
3444 3445 # for example, releasing other resources like transaction
3445 3446 # may raise exception before ``dirstateguard.release`` in
3446 3447 # ``release(tr, ....)``.
3447 3448 self._abort()
3448 3449
3449 3450 def close(self):
3450 3451 if not self._active: # already inactivated
3451 3452 msg = (_("can't close already inactivated backup: dirstate%s")
3452 3453 % self._suffix)
3453 3454 raise error.Abort(msg)
3454 3455
3455 3456 self._repo.dirstate._clearbackup(self._repo.currenttransaction(),
3456 3457 self._suffix)
3457 3458 self._active = False
3458 3459 self._closed = True
3459 3460
3460 3461 def _abort(self):
3461 3462 self._repo.dirstate._restorebackup(self._repo.currenttransaction(),
3462 3463 self._suffix)
3463 3464 self._active = False
3464 3465
3465 3466 def release(self):
3466 3467 if not self._closed:
3467 3468 if not self._active: # already inactivated
3468 3469 msg = (_("can't release already inactivated backup:"
3469 3470 " dirstate%s")
3470 3471 % self._suffix)
3471 3472 raise error.Abort(msg)
3472 3473 self._abort()
@@ -1,218 +1,218 b''
1 1 # formatter.py - generic output formatting for mercurial
2 2 #
3 3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import cPickle
11 11 import os
12 12
13 13 from .i18n import _
14 14 from .node import (
15 15 hex,
16 16 short,
17 17 )
18 18
19 19 from . import (
20 20 encoding,
21 21 error,
22 22 templater,
23 23 )
24 24
class baseformatter(object):
    '''Shared machinery for all formatter backends.

    Accumulates key/value pairs for the current item in ``self._item``;
    subclasses decide how (and whether) each completed item is rendered.
    '''
    def __init__(self, ui, topic, opts):
        self._ui = ui
        self._topic = topic
        self._style = opts.get("style")
        self._template = opts.get("template")
        self._item = None
        # function to convert node to string suitable for this output
        self.hexfunc = hex
    def __nonzero__(self):
        '''return False if we're not doing real templating so we can
        skip extra work'''
        return True
    def _showitem(self):
        '''show a formatted item once all data is collected'''
        pass
    def startitem(self):
        '''begin an item in the format list'''
        if self._item is not None:
            self._showitem()
        self._item = {}
    def data(self, **data):
        '''insert data into item that's not shown in default output'''
        self._item.update(data)
    def write(self, fields, deftext, *fielddata, **opts):
        '''do default text output while assigning data to item'''
        keys = fields.split()
        assert len(keys) == len(fielddata)
        self._item.update(zip(keys, fielddata))
    def condwrite(self, cond, fields, deftext, *fielddata, **opts):
        '''do conditional write (primarily for plain formatter)'''
        keys = fields.split()
        assert len(keys) == len(fielddata)
        self._item.update(zip(keys, fielddata))
    def plain(self, text, **opts):
        '''show raw text for non-templated mode'''
        pass
    def end(self):
        '''end output for the formatter'''
        if self._item is not None:
            self._showitem()
66 66
class plainformatter(baseformatter):
    '''the default text output scheme'''
    def __init__(self, ui, topic, opts):
        baseformatter.__init__(self, ui, topic, opts)
        # full hashes in debug mode, abbreviated ones otherwise
        self.hexfunc = hex if ui.debugflag else short
    def __nonzero__(self):
        # tell callers no real templating is happening
        return False
    def startitem(self):
        pass
    def data(self, **data):
        # plain output never shows auxiliary data
        pass
    def write(self, fields, deftext, *fielddata, **opts):
        self._ui.write(deftext % fielddata, **opts)
    def condwrite(self, cond, fields, deftext, *fielddata, **opts):
        '''do conditional write'''
        if not cond:
            return
        self._ui.write(deftext % fielddata, **opts)
    def plain(self, text, **opts):
        self._ui.write(text, **opts)
    def end(self):
        pass
91 91
class debugformatter(baseformatter):
    '''Dump collected items as a Python-literal list, for debugging.'''
    def __init__(self, ui, topic, opts):
        baseformatter.__init__(self, ui, topic, opts)
        self._ui.write("%s = [\n" % self._topic)
    def _showitem(self):
        self._ui.write(" %s,\n" % repr(self._item))
    def end(self):
        baseformatter.end(self)
        self._ui.write("]\n")
101 101
class pickleformatter(baseformatter):
    '''Accumulate all items and emit a single pickle stream at the end.'''
    def __init__(self, ui, topic, opts):
        baseformatter.__init__(self, ui, topic, opts)
        # items are buffered here until end() so one pickle covers them all
        self._data = []
    def _showitem(self):
        self._data.append(self._item)
    def end(self):
        baseformatter.end(self)
        self._ui.write(cPickle.dumps(self._data))
111 111
112 112 def _jsonifyobj(v):
113 113 if isinstance(v, tuple):
114 114 return '[' + ', '.join(_jsonifyobj(e) for e in v) + ']'
115 115 elif v is None:
116 116 return 'null'
117 117 elif v is True:
118 118 return 'true'
119 119 elif v is False:
120 120 return 'false'
121 121 elif isinstance(v, (int, float)):
122 122 return str(v)
123 123 else:
124 124 return '"%s"' % encoding.jsonescape(v)
125 125
class jsonformatter(baseformatter):
    '''Emit items as a JSON array of objects, streamed as they complete.'''
    def __init__(self, ui, topic, opts):
        baseformatter.__init__(self, ui, topic, opts)
        self._ui.write("[")
        # remembers whether a separating comma is needed before the next item
        self._ui._first = True
    def _showitem(self):
        if self._ui._first:
            self._ui._first = False
        else:
            self._ui.write(",")

        self._ui.write("\n {\n")
        fragments = [' "%s": %s' % (k, _jsonifyobj(v))
                     for k, v in sorted(self._item.items())]
        self._ui.write(",\n".join(fragments))
        self._ui.write("\n }")
    def end(self):
        baseformatter.end(self)
        self._ui.write("\n]\n")
149 149
class templateformatter(baseformatter):
    '''Render each completed item through a user-supplied template.'''
    def __init__(self, ui, topic, opts):
        baseformatter.__init__(self, ui, topic, opts)
        self._topic = topic
        # resolve -T/--template to a templater; literal templates are
        # cached under the topic name (see gettemplater/lookuptemplate)
        self._t = gettemplater(ui, topic, opts.get('template', ''))
    def _showitem(self):
        # pass the ui object through the mapping so template functions
        # such as label() can apply effects to templated output
        g = self._t(self._topic, ui=self._ui, **self._item)
        self._ui.write(templater.stringify(g))
158 158
def lookuptemplate(ui, topic, tmpl):
    """Resolve a template argument to (template-string, mapfile-path).

    Exactly one element of the returned pair is non-None: either an
    inline template string or the path of a style map file.
    """
    # looks like a literal template?
    if '{' in tmpl:
        return tmpl, None

    # perhaps a stock style?
    if not os.path.split(tmpl)[0]:
        stylefile = (templater.templatepath('map-cmdline.' + tmpl)
                     or templater.templatepath(tmpl))
        if stylefile and os.path.isfile(stylefile):
            return None, stylefile

    # perhaps it's a reference to [templates]
    configured = ui.config('templates', tmpl)
    if configured:
        try:
            return templater.unquotestring(configured), None
        except SyntaxError:
            return configured, None

    if tmpl == 'list':
        ui.write(_("available styles: %s\n") % templater.stylelist())
        raise error.Abort(_("specify a template"))

    # perhaps it's a path to a map or a template
    if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
        # is it a mapfile for a style?
        if os.path.basename(tmpl).startswith("map-"):
            return None, os.path.realpath(tmpl)
        return open(tmpl).read(), None

    # constant string?
    return tmpl, None
194 194
def gettemplater(ui, topic, spec):
    """Build a templater for *spec*; a literal template is cached as *topic*."""
    tmpl, mapfile = lookuptemplate(ui, topic, spec)
    engine = templater.templater(mapfile, {})
    if tmpl:
        # make the inline template available under the topic name
        engine.cache[topic] = tmpl
    return engine
201 201
def formatter(ui, topic, opts):
    """Pick a formatter implementation from -T/--template and config."""
    template = opts.get("template", "")
    # reserved template names select dedicated machine-readable backends
    special = {"json": jsonformatter,
               "pickle": pickleformatter,
               "debug": debugformatter}
    if template in special:
        return special[template](ui, topic, opts)
    if template:
        return templateformatter(ui, topic, opts)
    # developer config: ui.formatdebug
    if ui.configbool('ui', 'formatdebug'):
        return debugformatter(ui, topic, opts)
    # deprecated config: ui.formatjson
    if ui.configbool('ui', 'formatjson'):
        return jsonformatter(ui, topic, opts)
    return plainformatter(ui, topic, opts)
@@ -1,1033 +1,1032 b''
1 1 # templater.py - template expansion for output
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import os
11 11 import re
12 12 import types
13 13
14 14 from .i18n import _
15 15 from . import (
16 16 config,
17 17 error,
18 18 minirst,
19 19 parser,
20 20 revset as revsetmod,
21 21 templatefilters,
22 22 templatekw,
23 23 util,
24 24 )
25 25
26 26 # template parsing
27 27
# grammar table consumed by parser.parser(); tokenize() produces the
# token stream these entries describe
elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
    ",": (2, None, None, ("list", 2), None),
    "|": (5, None, None, ("|", 5), None),
    "%": (6, None, None, ("%", 6), None),
    ")": (0, None, None, None, None),
    "integer": (0, "integer", None, None, None),
    "symbol": (0, "symbol", None, None, None),
    "string": (0, "string", None, None, None),
    "template": (0, "template", None, None, None),
    "end": (0, None, None, None, None),
}
41 41
def tokenize(program, start, end):
    """Lex program[start:end], yielding (token-type, value, position) triples.

    Yields an 'end' token and returns when an unmatched '}' closes the
    template expansion; raises ParseError on malformed input such as an
    unterminated string or an integer literal without digits.
    """
    pos = start
    while pos < end:
        c = program[pos]
        if c.isspace(): # skip inter-token whitespace
            pass
        elif c in "(,)%|": # handle simple operators
            yield (c, None, pos)
        elif c in '"\'': # handle quoted templates
            s = pos + 1
            data, pos = _parsetemplate(program, s, end, c)
            yield ('template', data, s)
            pos -= 1
        elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'):
            # handle quoted strings
            c = program[pos + 1]
            s = pos = pos + 2
            while pos < end: # find closing quote
                d = program[pos]
                if d == '\\': # skip over escaped characters
                    pos += 2
                    continue
                if d == c:
                    yield ('string', program[s:pos], s)
                    break
                pos += 1
            else:
                raise error.ParseError(_("unterminated string"), s)
        elif c.isdigit() or c == '-':
            s = pos
            if c == '-': # simply take negate operator as part of integer
                pos += 1
            if pos >= end or not program[pos].isdigit():
                raise error.ParseError(_("integer literal without digits"), s)
            pos += 1
            while pos < end:
                d = program[pos]
                if not d.isdigit():
                    break
                pos += 1
            yield ('integer', program[s:pos], s)
            pos -= 1
        elif (c == '\\' and program[pos:pos + 2] in (r"\'", r'\"')
              or c == 'r' and program[pos:pos + 3] in (r"r\'", r'r\"')):
            # handle escaped quoted strings for compatibility with 2.9.2-3.4,
            # where some of nested templates were preprocessed as strings and
            # then compiled. therefore, \"...\" was allowed. (issue4733)
            #
            # processing flow of _evalifliteral() at 5ab28a2e9962:
            # outer template string -> stringify() -> compiletemplate()
            # ------------------------ ------------ ------------------
            # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}]
            # ~~~~~~~~
            # escaped quoted string
            if c == 'r':
                pos += 1
                token = 'string'
            else:
                token = 'template'
            quote = program[pos:pos + 2]
            s = pos = pos + 2
            while pos < end: # find closing escaped quote
                if program.startswith('\\\\\\', pos, end):
                    pos += 4 # skip over double escaped characters
                    continue
                if program.startswith(quote, pos, end):
                    # interpret as if it were a part of an outer string
                    data = parser.unescapestr(program[s:pos])
                    if token == 'template':
                        data = _parsetemplate(data, 0, len(data))[0]
                    yield (token, data, s)
                    pos += 1
                    break
                pos += 1
            else:
                raise error.ParseError(_("unterminated string"), s)
        elif c.isalnum() or c in '_':
            s = pos
            pos += 1
            while pos < end: # find end of symbol
                d = program[pos]
                if not (d.isalnum() or d == "_"):
                    break
                pos += 1
            sym = program[s:pos]
            yield ('symbol', sym, s)
            pos -= 1
        elif c == '}':
            yield ('end', None, pos + 1)
            return
        else:
            raise error.ParseError(_("syntax error"), pos)
        pos += 1
    raise error.ParseError(_("unterminated template expansion"), start)
136 136
def _parsetemplate(tmpl, start, stop, quote=''):
    r"""Parse tmpl[start:stop] into a list of (type, value) nodes.

    Returns (parsed, pos) where pos is one past the last consumed
    character (i.e. past the closing quote when *quote* is given).

    >>> _parsetemplate('foo{bar}"baz', 0, 12)
    ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12)
    >>> _parsetemplate('foo{bar}"baz', 0, 12, quote='"')
    ([('string', 'foo'), ('symbol', 'bar')], 9)
    >>> _parsetemplate('foo"{bar}', 0, 9, quote='"')
    ([('string', 'foo')], 4)
    >>> _parsetemplate(r'foo\"bar"baz', 0, 12, quote='"')
    ([('string', 'foo"'), ('string', 'bar')], 9)
    >>> _parsetemplate(r'foo\\"bar', 0, 10, quote='"')
    ([('string', 'foo\\')], 6)
    """
    parsed = []
    sepchars = '{' + quote
    pos = start
    p = parser.parser(elements)
    while pos < stop:
        # find the earliest separator: '{' opens an expansion, *quote*
        # terminates the template; the key sorts -1 (not found) last
        n = min((tmpl.find(c, pos, stop) for c in sepchars),
                key=lambda n: (n < 0, n))
        if n < 0:
            parsed.append(('string', parser.unescapestr(tmpl[pos:stop])))
            pos = stop
            break
        c = tmpl[n]
        # number of backslashes immediately preceding the separator
        bs = (n - pos) - len(tmpl[pos:n].rstrip('\\'))
        if bs % 2 == 1:
            # escaped (e.g. '\{', '\\\{', but not '\\{')
            parsed.append(('string', parser.unescapestr(tmpl[pos:n - 1]) + c))
            pos = n + 1
            continue
        if n > pos:
            parsed.append(('string', parser.unescapestr(tmpl[pos:n])))
        if c == quote:
            return parsed, n + 1

        parseres, pos = p.parse(tokenize(tmpl, n + 1, stop))
        parsed.append(parseres)

    if quote:
        raise error.ParseError(_("unterminated string"), start)
    return parsed, pos
179 179
def compiletemplate(tmpl, context):
    """Parse *tmpl* and compile each top-level piece with the default methods."""
    pieces, _pos = _parsetemplate(tmpl, 0, len(tmpl))
    return [compileexp(piece, context, methods) for piece in pieces]
183 183
def compileexp(exp, context, curmethods):
    """Compile one parse-tree node using the given method table."""
    method = curmethods.get(exp[0])
    if method is None:
        raise error.ParseError(_("unknown method '%s'") % exp[0])
    return method(exp, context)
189 189
190 190 # template evaluation
191 191
def getsymbol(exp):
    """Return the name carried by a 'symbol' node; ParseError otherwise."""
    if exp[0] != 'symbol':
        raise error.ParseError(_("expected a symbol, got '%s'") % exp[0])
    return exp[1]
196 196
def getlist(x):
    """Flatten a right-nested 'list' parse tree into a Python list."""
    if not x:
        return []
    if x[0] != 'list':
        # a single non-list node stands for a one-element list
        return [x]
    return getlist(x[1]) + [x[2]]
203 203
def gettemplate(exp, context):
    """Compile a template argument: an inline 'template' node or a named one."""
    kind = exp[0]
    if kind == 'template':
        return [compileexp(e, context, methods) for e in exp[1]]
    if kind == 'symbol':
        # unlike runsymbol(), here 'symbol' is always taken as template name
        # even if it exists in mapping. this allows us to override mapping
        # by web templates, e.g. 'changelogtag' is redefined in map file.
        return context._load(exp[1])
    raise error.ParseError(_("expected template specifier"))
213 213
def evalfuncarg(context, mapping, arg):
    """Evaluate a compiled argument, collapsing generators to strings."""
    compiledfunc, data = arg
    # func() may return string, generator of strings or arbitrary object such
    # as date tuple, but filter does not want generator.
    result = compiledfunc(context, mapping, data)
    if isinstance(result, types.GeneratorType):
        result = stringify(result)
    return result
222 222
def evalinteger(context, mapping, arg, err):
    """Evaluate *arg* and coerce the result to int; raise ParseError(err)."""
    value = evalfuncarg(context, mapping, arg)
    try:
        return int(value)
    except (TypeError, ValueError):
        raise error.ParseError(err)
229 229
def evalstring(context, mapping, arg):
    """Evaluate a compiled argument and flatten the result to a string."""
    compiledfunc, data = arg
    return stringify(compiledfunc(context, mapping, data))
233 233
def evalstringliteral(context, mapping, arg):
    """Evaluate given argument as string template, but returns symbol name
    if it is unknown"""
    compiledfunc, data = arg
    if compiledfunc is runsymbol:
        # an unresolvable symbol falls back to its own name
        result = compiledfunc(context, mapping, data, default=data)
    else:
        result = compiledfunc(context, mapping, data)
    return stringify(result)
243 243
def runinteger(context, mapping, data):
    """Evaluate a compiled integer literal."""
    return int(data)
246 246
def runstring(context, mapping, data):
    """Evaluate a compiled string literal (already unescaped at parse time)."""
    return data
249 249
250 250 def _recursivesymbolblocker(key):
251 251 def showrecursion(**args):
252 252 raise error.Abort(_("recursive reference '%s' in template") % key)
253 253 return showrecursion
254 254
def _runrecursivesymbol(context, mapping, key):
    # expanding this poison entry means the template refers to itself
    raise error.Abort(_("recursive reference '%s' in template") % key)
257 257
def runsymbol(context, mapping, key, default=''):
    """Resolve *key* from the mapping, context defaults, or a named template."""
    value = mapping.get(key)
    if value is None:
        value = context._defaults.get(key)
    if value is None:
        # put poison to cut recursion. we can't move this to parsing phase
        # because "x = {x}" is allowed if "x" is a keyword. (issue4758)
        safemapping = mapping.copy()
        safemapping[key] = _recursivesymbolblocker(key)
        try:
            value = context.process(key, safemapping)
        except TemplateNotFound:
            value = default
    if callable(value):
        return value(**mapping)
    return value
274 274
def buildtemplate(exp, context):
    """Compile a 'template' node into a runnable (func, data) pair."""
    compiled = [compileexp(e, context, methods) for e in exp[1]]
    if len(compiled) == 1:
        # fast path for string with no template fragment
        return compiled[0]
    return (runtemplate, compiled)
280 280
def runtemplate(context, mapping, template):
    """Yield the expansion of each compiled (func, data) pair in turn."""
    for part in template:
        compiledfunc, arg = part
        yield compiledfunc(context, mapping, arg)
284 284
def buildfilter(exp, context):
    """Compile a '|' expression: a filter or a unary function application."""
    arg = compileexp(exp[1], context, methods)
    name = getsymbol(exp[2])
    if name in context._filters:
        return (runfilter, (arg, context._filters[name]))
    if name in funcs:
        return (funcs[name], [arg])
    raise error.ParseError(_("unknown function '%s'") % name)
294 294 raise error.ParseError(_("unknown function '%s'") % n)
295 295
296 296 def runfilter(context, mapping, data):
297 297 arg, filt = data
298 298 thing = evalfuncarg(context, mapping, arg)
299 299 try:
300 300 return filt(thing)
301 301 except (ValueError, AttributeError, TypeError):
302 302 if isinstance(arg[1], tuple):
303 303 dt = arg[1][1]
304 304 else:
305 305 dt = arg[1]
306 306 raise error.Abort(_("template filter '%s' is not compatible with "
307 307 "keyword '%s'") % (filt.func_name, dt))
308 308
def buildmap(exp, context):
    """Compile a '%' (map) expression into (runmap, (func, data, ctmpl))."""
    compiledfunc, data = compileexp(exp[1], context, methods)
    membertmpl = gettemplate(exp[2], context)
    return (runmap, (compiledfunc, data, membertmpl))
313 313
def runmap(context, mapping, data):
    """Expand a '%' expression: apply a template to each member of a value.

    *data* is the (func, data, ctmpl) triple built by buildmap().
    """
    func, data, ctmpl = data
    d = func(context, mapping, data)
    # prefer the keyword's own itermaps() protocol; otherwise fall back
    # to plain iteration
    if util.safehasattr(d, 'itermaps'):
        diter = d.itermaps()
    else:
        try:
            diter = iter(d)
        except TypeError:
            if func is runsymbol:
                raise error.ParseError(_("keyword '%s' is not iterable") % data)
            else:
                raise error.ParseError(_("%r is not iterable") % d)

    for i in diter:
        lm = mapping.copy()
        if isinstance(i, dict):
            # each member supplies its own sub-mapping; keep the original
            # changeset node around for keywords that need it
            lm.update(i)
            lm['originalnode'] = mapping.get('node')
            yield runtemplate(context, lm, ctmpl)
        else:
            # v is not an iterable of dicts, this happen when 'key'
            # has been fully expanded already and format is useless.
            # If so, return the expanded value.
            yield i
339 339
def buildfunc(exp, context):
    """Compile a function call; a one-argument filter call is also accepted."""
    name = getsymbol(exp[1])
    args = [compileexp(x, context, exprmethods) for x in getlist(exp[2])]
    if name in funcs:
        return (funcs[name], args)
    if name in context._filters:
        if len(args) != 1:
            raise error.ParseError(_("filter %s expects one argument") % name)
        return (runfilter, (args[0], context._filters[name]))
    raise error.ParseError(_("unknown function '%s'") % name)
352 352
def date(context, mapping, args):
    """:date(date[, fmt]): Format a date. See :hg:`help dates` for formatting
    strings. The default is a Unix date format, including the timezone:
    "Mon Sep 04 15:13:13 2006 0700"."""
    if not (1 <= len(args) <= 2):
        # i18n: "date" is a keyword
        raise error.ParseError(_("date expects one or two arguments"))

    date = evalfuncarg(context, mapping, args[0])
    fmt = None
    if len(args) == 2:
        fmt = evalstring(context, mapping, args[1])
    try:
        if fmt is None:
            return util.datestr(date)
        return util.datestr(date, fmt)
    except (TypeError, ValueError):
        # i18n: "date" is a keyword
        raise error.ParseError(_("date expects a date information"))
373 373
def diff(context, mapping, args):
    """:diff([includepattern [, excludepattern]]): Show a diff, optionally
    specifying files to include or exclude."""
    if len(args) > 2:
        # i18n: "diff" is a keyword
        raise error.ParseError(_("diff expects zero, one, or two arguments"))

    def getpatterns(i):
        # optional pattern argument: absent or blank means "no pattern"
        if i < len(args):
            s = evalstring(context, mapping, args[i]).strip()
            if s:
                return [s]
        return []

    ctx = mapping['ctx']
    chunks = ctx.diff(match=ctx.match([], getpatterns(0), getpatterns(1)))

    return ''.join(chunks)
392 392
def fill(context, mapping, args):
    """:fill(text[, width[, initialident[, hangindent]]]): Fill many
    paragraphs with optional indentation. See the "fill" filter."""
    if not (1 <= len(args) <= 4):
        # i18n: "fill" is a keyword
        raise error.ParseError(_("fill expects one to four arguments"))

    text = evalstring(context, mapping, args[0])
    width = 76
    initindent = ''
    hangindent = ''
    if 2 <= len(args) <= 4:
        width = evalinteger(context, mapping, args[1],
                            # i18n: "fill" is a keyword
                            _("fill expects an integer width"))
        try:
            # the indent arguments are optional; an IndexError from the
            # args[] access simply keeps the defaults above
            initindent = evalstring(context, mapping, args[2])
            hangindent = evalstring(context, mapping, args[3])
        except IndexError:
            pass

    return templatefilters.fill(text, width, initindent, hangindent)
415 415
def pad(context, mapping, args):
    """:pad(text, width[, fillchar=' '[, right=False]]): Pad text with a
    fill character."""
    if not (2 <= len(args) <= 4):
        # i18n: "pad" is a keyword
        raise error.ParseError(_("pad() expects two to four arguments"))

    width = evalinteger(context, mapping, args[1],
                        # i18n: "pad" is a keyword
                        _("pad() expects an integer width"))

    text = evalstring(context, mapping, args[0])

    fillchar = ' '
    right = False
    if len(args) > 2:
        fillchar = evalstring(context, mapping, args[2])
    if len(args) > 3:
        # NOTE(review): the raw parse-tree payload is passed to parsebool
        # here (not an evaluated string), so only literal values work —
        # confirm this is intended
        right = util.parsebool(args[3][1])

    if right:
        return text.rjust(width, fillchar)
    return text.ljust(width, fillchar)
440 440
def indent(context, mapping, args):
    """:indent(text, indentchars[, firstline]): Indents all non-empty lines
    with the characters given in the indentchars string. An optional
    third parameter will override the indent for the first line only
    if present."""
    if not (2 <= len(args) <= 3):
        # i18n: "indent" is a keyword
        raise error.ParseError(_("indent() expects two or three arguments"))

    text = evalstring(context, mapping, args[0])
    indent = evalstring(context, mapping, args[1])

    firstline = indent
    if len(args) == 3:
        firstline = evalstring(context, mapping, args[2])

    # the indent function doesn't indent the first line, so we do it here
    return templatefilters.indent(firstline + text, indent)
460 460
def get(context, mapping, args):
    """:get(dict, key): Get an attribute/key from an object. Some keywords
    are complex types. This function allows you to obtain the value of an
    attribute on these types."""
    if len(args) != 2:
        # i18n: "get" is a keyword
        raise error.ParseError(_("get() expects two arguments"))

    container = evalfuncarg(context, mapping, args[0])
    if not util.safehasattr(container, 'get'):
        # i18n: "get" is a keyword
        raise error.ParseError(_("get() expects a dict as first argument"))

    key = evalfuncarg(context, mapping, args[1])
    return container.get(key)
476 476
def if_(context, mapping, args):
    """:if(expr, then[, else]): Conditionally execute based on the result of
    an expression."""
    if not (2 <= len(args) <= 3):
        # i18n: "if" is a keyword
        raise error.ParseError(_("if expects two or three arguments"))

    test = evalstring(context, mapping, args[0])
    if test:
        branch = args[1]
    elif len(args) == 3:
        branch = args[2]
    else:
        return
    yield branch[0](context, mapping, branch[1])
489 489
def ifcontains(context, mapping, args):
    """:ifcontains(search, thing, then[, else]): Conditionally execute based
    on whether the item "search" is in "thing"."""
    if not (3 <= len(args) <= 4):
        # i18n: "ifcontains" is a keyword
        raise error.ParseError(_("ifcontains expects three or four arguments"))

    needle = evalstring(context, mapping, args[0])
    haystack = evalfuncarg(context, mapping, args[1])

    if needle in haystack:
        branch = args[2]
    elif len(args) == 4:
        branch = args[3]
    else:
        return
    yield branch[0](context, mapping, branch[1])
504 504
def ifeq(context, mapping, args):
    """:ifeq(expr1, expr2, then[, else]): Conditionally execute based on
    whether 2 items are equivalent."""
    if not (3 <= len(args) <= 4):
        # i18n: "ifeq" is a keyword
        raise error.ParseError(_("ifeq expects three or four arguments"))

    lhs = evalstring(context, mapping, args[0])
    rhs = evalstring(context, mapping, args[1])
    if lhs == rhs:
        branch = args[2]
    elif len(args) == 4:
        branch = args[3]
    else:
        return
    yield branch[0](context, mapping, branch[1])
518 518
def join(context, mapping, args):
    """:join(list, sep): Join items in a list with a delimiter."""
    if not (1 <= len(args) <= 2):
        # i18n: "join" is a keyword
        raise error.ParseError(_("join expects one or two arguments"))

    joinset = args[0][0](context, mapping, args[0][1])
    if util.safehasattr(joinset, 'itermaps'):
        # keywords expose their preferred per-member rendering via joinfmt
        jf = joinset.joinfmt
        joinset = [jf(x) for x in joinset.itermaps()]

    joiner = " "
    if len(args) > 1:
        joiner = evalstring(context, mapping, args[1])

    needsep = False
    for item in joinset:
        if needsep:
            yield joiner
        yield item
        needsep = True
541 541
def label(context, mapping, args):
    """:label(label, expr): Apply a label to generated content. Content with
    a label applied can result in additional post-processing, such as
    automatic colorization."""
    if len(args) != 2:
        # i18n: "label" is a keyword
        raise error.ParseError(_("label expects two arguments"))

    thing = evalstring(context, mapping, args[1])

    # templated output puts the real ui object in the mapping; a plain
    # string here means no effects can be applied, so the content is
    # returned unchanged — NOTE(review): some callers appear to pass a
    # string in this slot, confirm which ones
    ui = mapping.get('ui', '')
    if isinstance(ui, str):
        return thing

    # preserve unknown symbol as literal so effects like 'red', 'bold',
    # etc. don't need to be quoted
    label = evalstringliteral(context, mapping, args[0])

    return ui.label(thing, label)
562 561
def latesttag(context, mapping, args):
    """:latesttag([pattern]): The global tags matching the given pattern on the
    most recent globally tagged ancestor of this changeset."""
    if len(args) > 1:
        # i18n: "latesttag" is a keyword
        raise error.ParseError(_("latesttag expects at most one argument"))

    pattern = None
    if args:
        pattern = evalstring(context, mapping, args[0])

    return templatekw.showlatesttags(pattern, **mapping)
575 574
def localdate(context, mapping, args):
    """:localdate(date[, tz]): Converts a date to the specified timezone.
    The default is local date."""
    if not (1 <= len(args) <= 2):
        # i18n: "localdate" is a keyword
        raise error.ParseError(_("localdate expects one or two arguments"))

    date = evalfuncarg(context, mapping, args[0])
    try:
        date = util.parsedate(date)
    except AttributeError: # not str nor date tuple
        # i18n: "localdate" is a keyword
        raise error.ParseError(_("localdate expects a date information"))
    if len(args) >= 2:
        # timezone argument: try a named/formatted timezone string first,
        # then fall back to a raw offset in seconds
        tzoffset = None
        tz = evalfuncarg(context, mapping, args[1])
        if isinstance(tz, str):
            tzoffset = util.parsetimezone(tz)
        if tzoffset is None:
            try:
                tzoffset = int(tz)
            except (TypeError, ValueError):
                # i18n: "localdate" is a keyword
                raise error.ParseError(_("localdate expects a timezone"))
    else:
        # no explicit timezone: use the local offset
        tzoffset = util.makedate()[1]
    return (date[0], tzoffset)
603 602
def revset(context, mapping, args):
    """:revset(query[, formatargs...]): Execute a revision set query. See
    :hg:`help revset`."""
    if not len(args) > 0:
        # i18n: "revset" is a keyword
        raise error.ParseError(_("revset expects one or more arguments"))

    raw = evalstring(context, mapping, args[0])
    ctx = mapping['ctx']
    repo = ctx.repo()

    def query(expr):
        # run one revset expression against the repository
        m = revsetmod.match(repo.ui, expr)
        return m(repo)

    if len(args) > 1:
        # extra arguments are interpolated into the query via formatspec();
        # such parameterized queries are evaluated fresh every time
        formatargs = [evalfuncarg(context, mapping, a) for a in args[1:]]
        revs = query(revsetmod.formatspec(raw, *formatargs))
        revs = list(revs)
    else:
        # plain queries are cached in the shared template cache, keyed by
        # the raw query text
        revsetcache = mapping['cache'].setdefault("revsetcache", {})
        if raw in revsetcache:
            revs = revsetcache[raw]
        else:
            revs = query(raw)
            revs = list(revs)
            revsetcache[raw] = revs

    return templatekw.showrevslist("revision", revs, **mapping)
633 632
def rstdoc(context, mapping, args):
    """:rstdoc(text, style): Format ReStructuredText."""
    if len(args) != 2:
        # i18n: "rstdoc" is a keyword
        raise error.ParseError(_("rstdoc expects two arguments"))

    source = evalstring(context, mapping, args[0])
    stylename = evalstring(context, mapping, args[1])

    return minirst.format(source, style=stylename, keep=['verbose'])
644 643
def shortest(context, mapping, args):
    """:shortest(node, minlength=4): Obtain the shortest representation of
    a node."""
    if not (1 <= len(args) <= 2):
        # i18n: "shortest" is a keyword
        raise error.ParseError(_("shortest() expects one or two arguments"))

    node = evalstring(context, mapping, args[0])

    minlength = 4
    if len(args) > 1:
        minlength = evalinteger(context, mapping, args[1],
                                # i18n: "shortest" is a keyword
                                _("shortest() expects an integer minlength"))

    cl = mapping['ctx']._repo.changelog
    def isvalid(test):
        # a prefix is valid if it matches a node unambiguously and cannot
        # be confused with a plain revision number
        try:
            try:
                cl.index.partialmatch(test)
            except AttributeError:
                # Pure mercurial doesn't support partialmatch on the index.
                # Fallback to the slow way.
                if cl._partialmatch(test) is None:
                    return False

            try:
                i = int(test)
                # if we are a pure int, then starting with zero will not be
                # confused as a rev; or, obviously, if the int is larger than
                # the value of the tip rev
                if test[0] == '0' or i > len(cl):
                    return True
                return False
            except ValueError:
                return True
        except error.RevlogError:
            return False

    shortest = node
    startlength = max(6, minlength)
    length = startlength
    # walk down from startlength to find the shortest valid prefix; if
    # startlength itself is ambiguous, grow the prefix instead
    while True:
        test = node[:length]
        if isvalid(test):
            shortest = test
            if length == minlength or length > startlength:
                return shortest
            length -= 1
        else:
            length += 1
            if len(shortest) <= length:
                return shortest
698 697
def strip(context, mapping, args):
    """:strip(text[, chars]): Strip characters from a string. By default,
    strips all leading and trailing whitespace."""
    if len(args) not in (1, 2):
        # i18n: "strip" is a keyword
        raise error.ParseError(_("strip expects one or two arguments"))

    text = evalstring(context, mapping, args[0])
    if len(args) < 2:
        return text.strip()
    # Optional second argument: the set of characters to strip.
    return text.strip(evalstring(context, mapping, args[1]))
711 710
def sub(context, mapping, args):
    """:sub(pattern, replacement, expression): Perform text substitution
    using regular expressions."""
    # NOTE: the 'yield' below makes this whole function a generator, so the
    # ParseErrors raised here only surface when the result is iterated.
    if len(args) != 3:
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub expects three arguments"))

    pat = evalstring(context, mapping, args[0])
    rpl = evalstring(context, mapping, args[1])
    src = evalstring(context, mapping, args[2])
    try:
        patre = re.compile(pat)
    except re.error:
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub got an invalid pattern: %s") % pat)
    try:
        # A bad replacement string (e.g. a dangling backslash) is only
        # detected by re at substitution time, hence the second guard.
        yield patre.sub(rpl, src)
    except re.error:
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub got an invalid replacement: %s") % rpl)
732 731
def startswith(context, mapping, args):
    """:startswith(pattern, text): Returns the value from the "text" argument
    if it begins with the content from the "pattern" argument."""
    if len(args) != 2:
        # i18n: "startswith" is a keyword
        raise error.ParseError(_("startswith expects two arguments"))

    # Evaluate pattern then text, preserving the original evaluation order.
    patn = evalstring(context, mapping, args[0])
    text = evalstring(context, mapping, args[1])
    return text if text.startswith(patn) else ''
745 744
746 745
def word(context, mapping, args):
    """:word(number, text[, separator]): Return the nth word from a string."""
    if len(args) not in (2, 3):
        # i18n: "word" is a keyword
        raise error.ParseError(_("word expects two or three arguments, got %d")
                               % len(args))

    num = evalinteger(context, mapping, args[0],
                      # i18n: "word" is a keyword
                      _("word expects an integer index"))
    text = evalstring(context, mapping, args[1])
    # An explicit separator is optional; None means "split on whitespace".
    splitter = evalstring(context, mapping, args[2]) if len(args) == 3 else None

    tokens = text.split(splitter)
    # Allow negative indexing like Python lists; anything out of range
    # yields the empty string rather than an error.
    if -len(tokens) <= num < len(tokens):
        return tokens[num]
    return ''
768 767
# methods to interpret function arguments or inner expressions (e.g. {_(x)})
# Maps a parse-tree node type to a compiler: each entry takes (exp, context)
# and returns a (runner-function, data) pair usable by the template engine.
exprmethods = {
    "integer": lambda e, c: (runinteger, e[1]),
    "string": lambda e, c: (runstring, e[1]),
    "symbol": lambda e, c: (runsymbol, e[1]),
    "template": buildtemplate,
    "group": lambda e, c: compileexp(e[1], c, exprmethods),
    # ".": buildmember,
    "|": buildfilter,
    "%": buildmap,
    "func": buildfunc,
}
781 780
# methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"})
# Identical to exprmethods except that a bare integer is looked up as a
# symbol (keyword) instead of being treated as a literal.
methods = exprmethods.copy()
methods["integer"] = exprmethods["symbol"] # '{1}' as variable
785 784
# template function name -> implementation; these are the callables
# available inside templates as {name(args...)}, dispatched via "func"
funcs = {
    "date": date,
    "diff": diff,
    "fill": fill,
    "get": get,
    "if": if_,
    "ifcontains": ifcontains,
    "ifeq": ifeq,
    "indent": indent,
    "join": join,
    "label": label,
    "latesttag": latesttag,
    "localdate": localdate,
    "pad": pad,
    "revset": revset,
    "rstdoc": rstdoc,
    "shortest": shortest,
    "startswith": startswith,
    "strip": strip,
    "sub": sub,
    "word": word,
}
808 807
# template engine

# Convenience alias: collapse a (possibly nested) template value to a string.
stringify = templatefilters.stringify
812 811
def _flatten(thing):
    '''yield a single stream from a possibly nested set of iterators'''
    # Strings are iterable but must be emitted whole, so test them first.
    if isinstance(thing, str):
        yield thing
    elif not util.safehasattr(thing, '__iter__'):
        # Scalar: stringify it; a bare None is dropped entirely.
        if thing is not None:
            yield str(thing)
    else:
        # Nested iterable: every element gets exactly the same treatment,
        # so recursion replaces the previously inlined per-element checks.
        for item in thing:
            for piece in _flatten(item):
                yield piece
830 829
def unquotestring(s):
    '''unwrap quotes'''
    # The string must be long enough to carry a quote at each end, and
    # both ends must be the same character.
    wellformed = len(s) >= 2 and s[0] == s[-1]
    if not wellformed:
        raise SyntaxError(_('unmatched quotes'))
    return s[1:-1]
836 835
class engine(object):
    '''template expansion engine.

    template expansion works like this. a map file contains key=value
    pairs. if value is quoted, it is treated as string. otherwise, it
    is treated as name of template file.

    templater is asked to expand a key in map. it looks up key, and
    looks for strings like this: {foo}. it expands {foo} by looking up
    foo in map, and substituting it. expansion is recursive: it stops
    when there is no more {foo} to replace.

    expansion also allows formatting and filtering.

    format uses key to expand each item in list. syntax is
    {key%format}.

    filter uses function to transform value. syntax is
    {key|filter1|filter2|...}.'''

    def __init__(self, loader, filters=None, defaults=None):
        # loader: callable mapping a template name to its raw text
        self._loader = loader
        if filters is None:
            filters = {}
        self._filters = filters
        if defaults is None:
            defaults = {}
        self._defaults = defaults
        # template name -> compiled template
        self._cache = {}

    def _load(self, t):
        '''load, parse, and cache a template'''
        if t not in self._cache:
            # put poison to cut recursion while compiling 't': a template
            # that references itself hits this entry and aborts.
            self._cache[t] = [(_runrecursivesymbol, t)]
            try:
                self._cache[t] = compiletemplate(self._loader(t), self)
            except: # re-raises
                # drop the poison so a later attempt can retry cleanly
                del self._cache[t]
                raise
        return self._cache[t]

    def process(self, t, mapping):
        '''Perform expansion. t is name of map element to expand.
        mapping contains added elements for use during expansion. Is a
        generator.'''
        return _flatten(runtemplate(self, mapping, self._load(t)))
884 883
885 884 engines = {'default': engine}
886 885
def stylelist():
    """Return a sorted, comma-separated list of available command-line styles.

    Scans the first template directory for files named
    ``map-cmdline.<style>`` and collects the ``<style>`` suffixes.  When no
    template directory exists, returns a human-readable hint instead.
    """
    paths = templatepaths()
    if not paths:
        return _('no templates found, try `hg debuginstall` for more info')
    # Renamed locals: the previous code shadowed the builtin 'file', the
    # str method name 'split', and this function's own name 'stylelist'.
    styles = []
    for filename in os.listdir(paths[0]):
        parts = filename.split(".")
        if parts[0] == "map-cmdline":
            styles.append(parts[1])
    return ", ".join(sorted(styles))
898 897
class TemplateNotFound(error.Abort):
    # raised by templater.load() when a name is missing from the style map
    pass
901 900
class templater(object):
    '''Read a style map file and expand named templates through per-style
    engine instances (looked up in the module-level "engines" table).'''

    def __init__(self, mapfile, filters=None, defaults=None, cache=None,
                 minchunk=1024, maxchunk=65536):
        '''set up template engine.
        mapfile is name of file to read map definitions from.
        filters is dict of functions. each transforms a value into another.
        defaults is dict of default map definitions.'''
        if filters is None:
            filters = {}
        if defaults is None:
            defaults = {}
        if cache is None:
            cache = {}
        self.mapfile = mapfile or 'template'
        # copy so that entries parsed below don't mutate the caller's dict
        self.cache = cache.copy()
        self.map = {}
        if mapfile:
            self.base = os.path.dirname(mapfile)
        else:
            self.base = ''
        self.filters = templatefilters.filters.copy()
        self.filters.update(filters)
        self.defaults = defaults
        self.minchunk, self.maxchunk = minchunk, maxchunk
        # engine-type name -> engine instance, built lazily in __call__
        self.ecache = {}

        if not mapfile:
            return
        if not os.path.exists(mapfile):
            raise error.Abort(_("style '%s' not found") % mapfile,
                              hint=_("available styles: %s") % stylelist())

        conf = config.config(includepaths=templatepaths())
        conf.read(mapfile)

        for key, val in conf[''].items():
            if not val:
                raise SyntaxError(_('%s: missing value') % conf.source('', key))
            if val[0] in "'\"":
                # quoted value: an inline template string, cached directly
                try:
                    self.cache[key] = unquotestring(val)
                except SyntaxError as inst:
                    # re-raise with the map-file location prepended
                    raise SyntaxError('%s: %s' %
                                      (conf.source('', key), inst.args[0]))
            else:
                # unquoted value: a template file name, optionally prefixed
                # with an engine type as 'engine:filename'
                val = 'default', val
                if ':' in val[1]:
                    val = val[1].split(':', 1)
                self.map[key] = val[0], os.path.join(self.base, val[1])

    def __contains__(self, key):
        # known either as an inline/cached template or as a mapped file
        return key in self.cache or key in self.map

    def load(self, t):
        '''Get the template for the given template name. Use a local cache.'''
        if t not in self.cache:
            try:
                self.cache[t] = util.readfile(self.map[t][1])
            except KeyError as inst:
                raise TemplateNotFound(_('"%s" not in template map') %
                                       inst.args[0])
            except IOError as inst:
                # keep errno, but point the message at the template file
                raise IOError(inst.args[0], _('template file %s: %s') %
                              (self.map[t][1], inst.args[1]))
        return self.cache[t]

    def __call__(self, t, **mapping):
        '''Expand template t with mapping; returns a stream of chunks.'''
        ttype = t in self.map and self.map[t][0] or 'default'
        if ttype not in self.ecache:
            self.ecache[ttype] = engines[ttype](self.load,
                                                self.filters, self.defaults)
        proc = self.ecache[ttype]

        stream = proc.process(t, mapping)
        if self.minchunk:
            # coalesce tiny chunks into progressively larger ones to cut
            # down on write-call overhead
            stream = util.increasingchunks(stream, min=self.minchunk,
                                           max=self.maxchunk)
        return stream
981 980
def templatepaths():
    '''return locations used for template files.'''
    # Single candidate today: the 'templates' directory under the install
    # data path; only existing directories are returned.
    candidate = os.path.normpath(os.path.join(util.datapath, 'templates'))
    return [candidate] if os.path.isdir(candidate) else []
988 987
def templatepath(name):
    '''return location of template file. returns None if not found.'''
    for directory in templatepaths():
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    # not present in any template directory
    return None
996 995
def stylemap(styles, paths=None):
    """Return path to mapfile for a given style.

    Searches mapfile in the following locations:
    1. templatepath/style/map
    2. templatepath/map-style
    3. templatepath/map
    """

    # Normalize both arguments to lists.
    if paths is None:
        paths = templatepaths()
    elif isinstance(paths, str):
        paths = [paths]
    if isinstance(styles, str):
        styles = [styles]

    for style in styles:
        # only plain name is allowed to honor template paths
        dangerous = (not style
                     or style in (os.curdir, os.pardir)
                     or os.sep in style
                     or (os.altsep and os.altsep in style))
        if dangerous:
            continue
        # candidate mapfile names, most specific first
        locations = [os.path.join(style, 'map'), 'map-' + style, 'map']

        for path in paths:
            for location in locations:
                mapfile = os.path.join(path, location)
                if os.path.isfile(mapfile):
                    return style, mapfile

    raise RuntimeError("No hgweb templates found in %r" % paths)
1031 1030
# tell hggettext to extract docstrings from these functions:
# (the docstrings double as user-facing help for template functions)
i18nfunctions = funcs.values()
@@ -1,355 +1,364 b''
1 1 $ cat <<EOF >> $HGRCPATH
2 2 > [extensions]
3 3 > color =
4 4 > [color]
5 5 > mode = ansi
6 6 > EOF
7 7 Terminfo codes compatibility fix
8 8 $ echo "color.none=0" >> $HGRCPATH
9 9
10 10 $ hg init repo1
11 11 $ cd repo1
12 12 $ mkdir a b a/1 b/1 b/2
13 13 $ touch in_root a/in_a b/in_b a/1/in_a_1 b/1/in_b_1 b/2/in_b_2
14 14
15 15 hg status in repo root:
16 16
17 17 $ hg status --color=always
18 18 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/1/in_a_1\x1b[0m (esc)
19 19 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/in_a\x1b[0m (esc)
20 20 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/1/in_b_1\x1b[0m (esc)
21 21 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/2/in_b_2\x1b[0m (esc)
22 22 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/in_b\x1b[0m (esc)
23 23 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_root\x1b[0m (esc)
24 24
25 25 $ hg status --color=debug
26 26 [status.unknown|? ][status.unknown|a/1/in_a_1]
27 27 [status.unknown|? ][status.unknown|a/in_a]
28 28 [status.unknown|? ][status.unknown|b/1/in_b_1]
29 29 [status.unknown|? ][status.unknown|b/2/in_b_2]
30 30 [status.unknown|? ][status.unknown|b/in_b]
31 31 [status.unknown|? ][status.unknown|in_root]
32 32
33 hg status with template
34 $ hg status -T "{label('red', path)}\n" --color=debug
35 [red|a/1/in_a_1]
36 [red|a/in_a]
37 [red|b/1/in_b_1]
38 [red|b/2/in_b_2]
39 [red|b/in_b]
40 [red|in_root]
41
33 42 hg status . in repo root:
34 43
35 44 $ hg status --color=always .
36 45 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/1/in_a_1\x1b[0m (esc)
37 46 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/in_a\x1b[0m (esc)
38 47 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/1/in_b_1\x1b[0m (esc)
39 48 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/2/in_b_2\x1b[0m (esc)
40 49 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/in_b\x1b[0m (esc)
41 50 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_root\x1b[0m (esc)
42 51
43 52 $ hg status --color=always --cwd a
44 53 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/1/in_a_1\x1b[0m (esc)
45 54 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/in_a\x1b[0m (esc)
46 55 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/1/in_b_1\x1b[0m (esc)
47 56 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/2/in_b_2\x1b[0m (esc)
48 57 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/in_b\x1b[0m (esc)
49 58 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_root\x1b[0m (esc)
50 59 $ hg status --color=always --cwd a .
51 60 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m1/in_a_1\x1b[0m (esc)
52 61 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_a\x1b[0m (esc)
53 62 $ hg status --color=always --cwd a ..
54 63 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m1/in_a_1\x1b[0m (esc)
55 64 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_a\x1b[0m (esc)
56 65 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../b/1/in_b_1\x1b[0m (esc)
57 66 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../b/2/in_b_2\x1b[0m (esc)
58 67 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../b/in_b\x1b[0m (esc)
59 68 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../in_root\x1b[0m (esc)
60 69
61 70 $ hg status --color=always --cwd b
62 71 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/1/in_a_1\x1b[0m (esc)
63 72 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/in_a\x1b[0m (esc)
64 73 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/1/in_b_1\x1b[0m (esc)
65 74 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/2/in_b_2\x1b[0m (esc)
66 75 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/in_b\x1b[0m (esc)
67 76 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_root\x1b[0m (esc)
68 77 $ hg status --color=always --cwd b .
69 78 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m1/in_b_1\x1b[0m (esc)
70 79 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m2/in_b_2\x1b[0m (esc)
71 80 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_b\x1b[0m (esc)
72 81 $ hg status --color=always --cwd b ..
73 82 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../a/1/in_a_1\x1b[0m (esc)
74 83 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../a/in_a\x1b[0m (esc)
75 84 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m1/in_b_1\x1b[0m (esc)
76 85 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m2/in_b_2\x1b[0m (esc)
77 86 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_b\x1b[0m (esc)
78 87 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../in_root\x1b[0m (esc)
79 88
80 89 $ hg status --color=always --cwd a/1
81 90 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/1/in_a_1\x1b[0m (esc)
82 91 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/in_a\x1b[0m (esc)
83 92 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/1/in_b_1\x1b[0m (esc)
84 93 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/2/in_b_2\x1b[0m (esc)
85 94 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/in_b\x1b[0m (esc)
86 95 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_root\x1b[0m (esc)
87 96 $ hg status --color=always --cwd a/1 .
88 97 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_a_1\x1b[0m (esc)
89 98 $ hg status --color=always --cwd a/1 ..
90 99 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_a_1\x1b[0m (esc)
91 100 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../in_a\x1b[0m (esc)
92 101
93 102 $ hg status --color=always --cwd b/1
94 103 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/1/in_a_1\x1b[0m (esc)
95 104 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/in_a\x1b[0m (esc)
96 105 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/1/in_b_1\x1b[0m (esc)
97 106 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/2/in_b_2\x1b[0m (esc)
98 107 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/in_b\x1b[0m (esc)
99 108 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_root\x1b[0m (esc)
100 109 $ hg status --color=always --cwd b/1 .
101 110 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_b_1\x1b[0m (esc)
102 111 $ hg status --color=always --cwd b/1 ..
103 112 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_b_1\x1b[0m (esc)
104 113 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../2/in_b_2\x1b[0m (esc)
105 114 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../in_b\x1b[0m (esc)
106 115
107 116 $ hg status --color=always --cwd b/2
108 117 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/1/in_a_1\x1b[0m (esc)
109 118 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/in_a\x1b[0m (esc)
110 119 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/1/in_b_1\x1b[0m (esc)
111 120 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/2/in_b_2\x1b[0m (esc)
112 121 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/in_b\x1b[0m (esc)
113 122 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_root\x1b[0m (esc)
114 123 $ hg status --color=always --cwd b/2 .
115 124 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_b_2\x1b[0m (esc)
116 125 $ hg status --color=always --cwd b/2 ..
117 126 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../1/in_b_1\x1b[0m (esc)
118 127 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_b_2\x1b[0m (esc)
119 128 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../in_b\x1b[0m (esc)
120 129
121 130 Make sure --color=never works
122 131 $ hg status --color=never
123 132 ? a/1/in_a_1
124 133 ? a/in_a
125 134 ? b/1/in_b_1
126 135 ? b/2/in_b_2
127 136 ? b/in_b
128 137 ? in_root
129 138
130 139 Make sure ui.formatted=False works
131 140 $ hg status --config ui.formatted=False
132 141 ? a/1/in_a_1
133 142 ? a/in_a
134 143 ? b/1/in_b_1
135 144 ? b/2/in_b_2
136 145 ? b/in_b
137 146 ? in_root
138 147
139 148 $ cd ..
140 149
141 150 $ hg init repo2
142 151 $ cd repo2
143 152 $ touch modified removed deleted ignored
144 153 $ echo "^ignored$" > .hgignore
145 154 $ hg ci -A -m 'initial checkin'
146 155 adding .hgignore
147 156 adding deleted
148 157 adding modified
149 158 adding removed
150 159 $ hg log --color=debug
151 160 [log.changeset changeset.draft|changeset: 0:389aef86a55e]
152 161 [log.tag|tag: tip]
153 162 [log.user|user: test]
154 163 [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
155 164 [log.summary|summary: initial checkin]
156 165
157 166 Labels on empty strings should not be displayed, labels on custom
158 167 templates should be.
159 168
160 169 $ hg log --color=debug -T '{label("my.label",author)}\n{label("skipped.label","")}'
161 170 [my.label|test]
162 171 $ touch modified added unknown ignored
163 172 $ hg add added
164 173 $ hg remove removed
165 174 $ rm deleted
166 175
167 176 hg status:
168 177
169 178 $ hg status --color=always
170 179 \x1b[0;32;1mA \x1b[0m\x1b[0;32;1madded\x1b[0m (esc)
171 180 \x1b[0;31;1mR \x1b[0m\x1b[0;31;1mremoved\x1b[0m (esc)
172 181 \x1b[0;36;1;4m! \x1b[0m\x1b[0;36;1;4mdeleted\x1b[0m (esc)
173 182 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4munknown\x1b[0m (esc)
174 183
175 184 hg status modified added removed deleted unknown never-existed ignored:
176 185
177 186 $ hg status --color=always modified added removed deleted unknown never-existed ignored
178 187 never-existed: * (glob)
179 188 \x1b[0;32;1mA \x1b[0m\x1b[0;32;1madded\x1b[0m (esc)
180 189 \x1b[0;31;1mR \x1b[0m\x1b[0;31;1mremoved\x1b[0m (esc)
181 190 \x1b[0;36;1;4m! \x1b[0m\x1b[0;36;1;4mdeleted\x1b[0m (esc)
182 191 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4munknown\x1b[0m (esc)
183 192
184 193 $ hg copy modified copied
185 194
186 195 hg status -C:
187 196
188 197 $ hg status --color=always -C
189 198 \x1b[0;32;1mA \x1b[0m\x1b[0;32;1madded\x1b[0m (esc)
190 199 \x1b[0;32;1mA \x1b[0m\x1b[0;32;1mcopied\x1b[0m (esc)
191 200 \x1b[0;0m modified\x1b[0m (esc)
192 201 \x1b[0;31;1mR \x1b[0m\x1b[0;31;1mremoved\x1b[0m (esc)
193 202 \x1b[0;36;1;4m! \x1b[0m\x1b[0;36;1;4mdeleted\x1b[0m (esc)
194 203 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4munknown\x1b[0m (esc)
195 204
196 205 hg status -A:
197 206
198 207 $ hg status --color=always -A
199 208 \x1b[0;32;1mA \x1b[0m\x1b[0;32;1madded\x1b[0m (esc)
200 209 \x1b[0;32;1mA \x1b[0m\x1b[0;32;1mcopied\x1b[0m (esc)
201 210 \x1b[0;0m modified\x1b[0m (esc)
202 211 \x1b[0;31;1mR \x1b[0m\x1b[0;31;1mremoved\x1b[0m (esc)
203 212 \x1b[0;36;1;4m! \x1b[0m\x1b[0;36;1;4mdeleted\x1b[0m (esc)
204 213 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4munknown\x1b[0m (esc)
205 214 \x1b[0;30;1mI \x1b[0m\x1b[0;30;1mignored\x1b[0m (esc)
206 215 \x1b[0;0mC \x1b[0m\x1b[0;0m.hgignore\x1b[0m (esc)
207 216 \x1b[0;0mC \x1b[0m\x1b[0;0mmodified\x1b[0m (esc)
208 217
209 218
210 219 hg status -A (with terminfo color):
211 220
212 221 #if tic
213 222
214 223 $ mkdir "$TESTTMP/terminfo"
215 224 $ TERMINFO="$TESTTMP/terminfo" tic "$TESTDIR/hgterm.ti"
216 225 $ TERM=hgterm TERMINFO="$TESTTMP/terminfo" hg status --config color.mode=terminfo --color=always -A
217 226 \x1b[30m\x1b[32m\x1b[1mA \x1b[30m\x1b[30m\x1b[32m\x1b[1madded\x1b[30m (esc)
218 227 \x1b[30m\x1b[32m\x1b[1mA \x1b[30m\x1b[30m\x1b[32m\x1b[1mcopied\x1b[30m (esc)
219 228 \x1b[30m\x1b[30m modified\x1b[30m (esc)
220 229 \x1b[30m\x1b[31m\x1b[1mR \x1b[30m\x1b[30m\x1b[31m\x1b[1mremoved\x1b[30m (esc)
221 230 \x1b[30m\x1b[36m\x1b[1m\x1b[4m! \x1b[30m\x1b[30m\x1b[36m\x1b[1m\x1b[4mdeleted\x1b[30m (esc)
222 231 \x1b[30m\x1b[35m\x1b[1m\x1b[4m? \x1b[30m\x1b[30m\x1b[35m\x1b[1m\x1b[4munknown\x1b[30m (esc)
223 232 \x1b[30m\x1b[30m\x1b[1mI \x1b[30m\x1b[30m\x1b[30m\x1b[1mignored\x1b[30m (esc)
224 233 \x1b[30m\x1b[30mC \x1b[30m\x1b[30m\x1b[30m.hgignore\x1b[30m (esc)
225 234 \x1b[30m\x1b[30mC \x1b[30m\x1b[30m\x1b[30mmodified\x1b[30m (esc)
226 235
227 236 #endif
228 237
229 238
230 239 $ echo "^ignoreddir$" > .hgignore
231 240 $ mkdir ignoreddir
232 241 $ touch ignoreddir/file
233 242
234 243 hg status ignoreddir/file:
235 244
236 245 $ hg status --color=always ignoreddir/file
237 246
238 247 hg status -i ignoreddir/file:
239 248
240 249 $ hg status --color=always -i ignoreddir/file
241 250 \x1b[0;30;1mI \x1b[0m\x1b[0;30;1mignoreddir/file\x1b[0m (esc)
242 251 $ cd ..
243 252
244 253 check 'status -q' and some combinations
245 254
246 255 $ hg init repo3
247 256 $ cd repo3
248 257 $ touch modified removed deleted ignored
249 258 $ echo "^ignored$" > .hgignore
250 259 $ hg commit -A -m 'initial checkin'
251 260 adding .hgignore
252 261 adding deleted
253 262 adding modified
254 263 adding removed
255 264 $ touch added unknown ignored
256 265 $ hg add added
257 266 $ echo "test" >> modified
258 267 $ hg remove removed
259 268 $ rm deleted
260 269 $ hg copy modified copied
261 270
262 271 test unknown color
263 272
264 273 $ hg --config color.status.modified=periwinkle status --color=always
265 274 ignoring unknown color/effect 'periwinkle' (configured in color.status.modified)
266 275 M modified
267 276 \x1b[0;32;1mA \x1b[0m\x1b[0;32;1madded\x1b[0m (esc)
268 277 \x1b[0;32;1mA \x1b[0m\x1b[0;32;1mcopied\x1b[0m (esc)
269 278 \x1b[0;31;1mR \x1b[0m\x1b[0;31;1mremoved\x1b[0m (esc)
270 279 \x1b[0;36;1;4m! \x1b[0m\x1b[0;36;1;4mdeleted\x1b[0m (esc)
271 280 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4munknown\x1b[0m (esc)
272 281
273 282 Run status with 2 different flags.
274 283 Check if result is the same or different.
275 284 If result is not as expected, raise error
276 285
277 286 $ assert() {
278 287 > hg status --color=always $1 > ../a
279 288 > hg status --color=always $2 > ../b
280 289 > if diff ../a ../b > /dev/null; then
281 290 > out=0
282 291 > else
283 292 > out=1
284 293 > fi
285 294 > if [ $3 -eq 0 ]; then
286 295 > df="same"
287 296 > else
288 297 > df="different"
289 298 > fi
290 299 > if [ $out -ne $3 ]; then
291 300 > echo "Error on $1 and $2, should be $df."
292 301 > fi
293 302 > }
294 303
295 304 assert flag1 flag2 [0-same | 1-different]
296 305
297 306 $ assert "-q" "-mard" 0
298 307 $ assert "-A" "-marduicC" 0
299 308 $ assert "-qA" "-mardcC" 0
300 309 $ assert "-qAui" "-A" 0
301 310 $ assert "-qAu" "-marducC" 0
302 311 $ assert "-qAi" "-mardicC" 0
303 312 $ assert "-qu" "-u" 0
304 313 $ assert "-q" "-u" 1
305 314 $ assert "-m" "-a" 1
306 315 $ assert "-r" "-d" 1
307 316 $ cd ..
308 317
309 318 test 'resolve -l'
310 319
311 320 $ hg init repo4
312 321 $ cd repo4
313 322 $ echo "file a" > a
314 323 $ echo "file b" > b
315 324 $ hg add a b
316 325 $ hg commit -m "initial"
317 326 $ echo "file a change 1" > a
318 327 $ echo "file b change 1" > b
319 328 $ hg commit -m "head 1"
320 329 $ hg update 0
321 330 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
322 331 $ echo "file a change 2" > a
323 332 $ echo "file b change 2" > b
324 333 $ hg commit -m "head 2"
325 334 created new head
326 335 $ hg merge
327 336 merging a
328 337 merging b
329 338 warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
330 339 warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
331 340 0 files updated, 0 files merged, 0 files removed, 2 files unresolved
332 341 use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
333 342 [1]
334 343 $ hg resolve -m b
335 344
336 345 hg resolve with one unresolved, one resolved:
337 346
338 347 $ hg resolve --color=always -l
339 348 \x1b[0;31;1mU \x1b[0m\x1b[0;31;1ma\x1b[0m (esc)
340 349 \x1b[0;32;1mR \x1b[0m\x1b[0;32;1mb\x1b[0m (esc)
341 350
342 351 color coding of error message with current availability of curses
343 352
344 353 $ hg unknowncommand > /dev/null
345 354 hg: unknown command 'unknowncommand'
346 355 [255]
347 356
348 357 color coding of error message without curses
349 358
350 359 $ echo 'raise ImportError' > curses.py
351 360 $ PYTHONPATH=`pwd`:$PYTHONPATH hg unknowncommand > /dev/null
352 361 hg: unknown command 'unknowncommand'
353 362 [255]
354 363
355 364 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now