formatter: move most of template option helper to formatter...
Matt Mackall - r25511:c2a4dfe2 default
@@ -1,3356 +1,3324
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO, shutil
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import context, repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 import formatter
17 18 import crecord as crecordmod
18 19 import lock as lockmod
19 20
20 21 def ishunk(x):
21 22 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
22 23 return isinstance(x, hunkclasses)
23 24
24 25 def newandmodified(chunks, originalchunks):
25 26 newlyaddedandmodifiedfiles = set()
26 27 for chunk in chunks:
27 28 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
28 29 originalchunks:
29 30 newlyaddedandmodifiedfiles.add(chunk.header.filename())
30 31 return newlyaddedandmodifiedfiles
31 32
32 33 def parsealiases(cmd):
33 34 return cmd.lstrip("^").split("|")
34 35
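A quick illustration of the alias parsing above (hedged, not part of the change): the optional "^" marker is stripped and the remaining string is split on "|".

# illustrative only -- behaviour follows directly from the one-liner above
assert parsealiases("^log|history") == ['log', 'history']
assert parsealiases("locate") == ['locate']
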
35 36 def setupwrapcolorwrite(ui):
36 37 # wrap ui.write so diff output can be labeled/colorized
37 38 def wrapwrite(orig, *args, **kw):
38 39 label = kw.pop('label', '')
39 40 for chunk, l in patch.difflabel(lambda: args):
40 41 orig(chunk, label=label + l)
41 42
42 43 oldwrite = ui.write
43 44 def wrap(*args, **kwargs):
44 45 return wrapwrite(oldwrite, *args, **kwargs)
45 46 setattr(ui, 'write', wrap)
46 47 return oldwrite
47 48
48 49 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
49 50 if usecurses:
50 51 if testfile:
51 52 recordfn = crecordmod.testdecorator(testfile,
52 53 crecordmod.testchunkselector)
53 54 else:
54 55 recordfn = crecordmod.chunkselector
55 56
56 57 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
57 58
58 59 else:
59 60 return patch.filterpatch(ui, originalhunks, operation)
60 61
61 62 def recordfilter(ui, originalhunks, operation=None):
62 63 """ Prompts the user to filter the originalhunks and return a list of
63 64 selected hunks.
64 65 *operation* is used for ui purposes to indicate to the user
65 66 what kind of filtering they are doing: reverting, committing, shelving, etc.
66 67 *operation* has to be a translated string.
67 68 """
68 69 usecurses = ui.configbool('experimental', 'crecord', False)
69 70 testfile = ui.config('experimental', 'crecordtest', None)
70 71 oldwrite = setupwrapcolorwrite(ui)
71 72 try:
72 73 newchunks = filterchunks(ui, originalhunks, usecurses, testfile,
73 74 operation)
74 75 finally:
75 76 ui.write = oldwrite
76 77 return newchunks
77 78
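A hedged sketch of how a caller can drive recordfilter(), following the same flow dorecord() uses below (build a diff, parse it into hunks, let the user pick); 'ui' and 'repo' are assumed to come from the calling command, and the operation string must already be translated:

# sketch only -- mirrors the dorecord() flow further down in this file
# diffopts = patch.difffeatureopts(ui, whitespace=True)
# diffopts.git = True
# originaldiff = patch.diff(repo, changes=repo.status(), opts=diffopts)
# originalchunks = patch.parsepatch(originaldiff)
# selected = recordfilter(ui, originalchunks, operation=_('committing'))
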
78 79 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
79 80 filterfn, *pats, **opts):
80 81 import merge as mergemod
81 82
82 83 if not ui.interactive():
83 84 raise util.Abort(_('running non-interactively, use %s instead') %
84 85 cmdsuggest)
85 86
86 87 # make sure username is set before going interactive
87 88 if not opts.get('user'):
88 89 ui.username() # raise exception, username not provided
89 90
90 91 def recordfunc(ui, repo, message, match, opts):
91 92 """This is generic record driver.
92 93
93 94 Its job is to interactively filter local changes, and
94 95 accordingly prepare the working directory into a state in which the
95 96 job can be delegated to a non-interactive commit command such as
96 97 'commit' or 'qrefresh'.
97 98
98 99 After the actual job is done by the non-interactive command, the
99 100 working directory is restored to its original state.
100 101
101 102 In the end we'll record interesting changes, and everything else
102 103 will be left in place, so the user can continue working.
103 104 """
104 105
105 106 checkunfinished(repo, commit=True)
106 107 merge = len(repo[None].parents()) > 1
107 108 if merge:
108 109 raise util.Abort(_('cannot partially commit a merge '
109 110 '(use "hg commit" instead)'))
110 111
111 112 status = repo.status(match=match)
112 113 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
113 114 diffopts.nodates = True
114 115 diffopts.git = True
115 116 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
116 117 originalchunks = patch.parsepatch(originaldiff)
117 118
118 119 # 1. filter patch, so we have an intending-to-apply subset of it
119 120 try:
120 121 chunks = filterfn(ui, originalchunks)
121 122 except patch.PatchError, err:
122 123 raise util.Abort(_('error parsing patch: %s') % err)
123 124
124 125 # We need to keep a backup of files that have been newly added and
125 126 # modified during the recording process because there is a previous
126 127 # version without the edit in the workdir
127 128 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
128 129 contenders = set()
129 130 for h in chunks:
130 131 try:
131 132 contenders.update(set(h.files()))
132 133 except AttributeError:
133 134 pass
134 135
135 136 changed = status.modified + status.added + status.removed
136 137 newfiles = [f for f in changed if f in contenders]
137 138 if not newfiles:
138 139 ui.status(_('no changes to record\n'))
139 140 return 0
140 141
141 142 modified = set(status.modified)
142 143
143 144 # 2. backup changed files, so we can restore them in the end
144 145
145 146 if backupall:
146 147 tobackup = changed
147 148 else:
148 149 tobackup = [f for f in newfiles if f in modified or f in \
149 150 newlyaddedandmodifiedfiles]
150 151 backups = {}
151 152 if tobackup:
152 153 backupdir = repo.join('record-backups')
153 154 try:
154 155 os.mkdir(backupdir)
155 156 except OSError, err:
156 157 if err.errno != errno.EEXIST:
157 158 raise
158 159 try:
159 160 # backup continues
160 161 for f in tobackup:
161 162 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
162 163 dir=backupdir)
163 164 os.close(fd)
164 165 ui.debug('backup %r as %r\n' % (f, tmpname))
165 166 util.copyfile(repo.wjoin(f), tmpname)
166 167 shutil.copystat(repo.wjoin(f), tmpname)
167 168 backups[f] = tmpname
168 169
169 170 fp = cStringIO.StringIO()
170 171 for c in chunks:
171 172 fname = c.filename()
172 173 if fname in backups:
173 174 c.write(fp)
174 175 dopatch = fp.tell()
175 176 fp.seek(0)
176 177
177 178 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
178 179 # 3a. apply filtered patch to clean repo (clean)
179 180 if backups:
180 181 # Equivalent to hg.revert
181 182 choices = lambda key: key in backups
182 183 mergemod.update(repo, repo.dirstate.p1(),
183 184 False, True, choices)
184 185
185 186 # 3b. (apply)
186 187 if dopatch:
187 188 try:
188 189 ui.debug('applying patch\n')
189 190 ui.debug(fp.getvalue())
190 191 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
191 192 except patch.PatchError, err:
192 193 raise util.Abort(str(err))
193 194 del fp
194 195
195 196 # 4. We prepared working directory according to filtered
196 197 # patch. Now is the time to delegate the job to
197 198 # commit/qrefresh or the like!
198 199
199 200 # Make all of the pathnames absolute.
200 201 newfiles = [repo.wjoin(nf) for nf in newfiles]
201 202 return commitfunc(ui, repo, *newfiles, **opts)
202 203 finally:
203 204 # 5. finally restore backed-up files
204 205 try:
205 206 for realname, tmpname in backups.iteritems():
206 207 ui.debug('restoring %r to %r\n' % (tmpname, realname))
207 208 util.copyfile(tmpname, repo.wjoin(realname))
208 209 # Our calls to copystat() here and above are a
209 210 # hack to trick any editors that have f open into
210 211 # thinking that we haven't modified it.
211 212 #
212 213 # Also note that this is racy as an editor could
213 214 # notice the file's mtime before we've finished
214 215 # writing it.
215 216 shutil.copystat(tmpname, repo.wjoin(realname))
216 217 os.unlink(tmpname)
217 218 if tobackup:
218 219 os.rmdir(backupdir)
219 220 except OSError:
220 221 pass
221 222
222 223 return commit(ui, repo, recordfunc, pats, opts)
223 224
224 225 def findpossible(cmd, table, strict=False):
225 226 """
226 227 Return cmd -> (aliases, command table entry)
227 228 for each matching command.
228 229 Return debug commands (or their aliases) only if no normal command matches.
229 230 """
230 231 choice = {}
231 232 debugchoice = {}
232 233
233 234 if cmd in table:
234 235 # short-circuit exact matches, "log" alias beats "^log|history"
235 236 keys = [cmd]
236 237 else:
237 238 keys = table.keys()
238 239
239 240 allcmds = []
240 241 for e in keys:
241 242 aliases = parsealiases(e)
242 243 allcmds.extend(aliases)
243 244 found = None
244 245 if cmd in aliases:
245 246 found = cmd
246 247 elif not strict:
247 248 for a in aliases:
248 249 if a.startswith(cmd):
249 250 found = a
250 251 break
251 252 if found is not None:
252 253 if aliases[0].startswith("debug") or found.startswith("debug"):
253 254 debugchoice[found] = (aliases, table[e])
254 255 else:
255 256 choice[found] = (aliases, table[e])
256 257
257 258 if not choice and debugchoice:
258 259 choice = debugchoice
259 260
260 261 return choice, allcmds
261 262
262 263 def findcmd(cmd, table, strict=True):
263 264 """Return (aliases, command table entry) for command string."""
264 265 choice, allcmds = findpossible(cmd, table, strict)
265 266
266 267 if cmd in choice:
267 268 return choice[cmd]
268 269
269 270 if len(choice) > 1:
270 271 clist = choice.keys()
271 272 clist.sort()
272 273 raise error.AmbiguousCommand(cmd, clist)
273 274
274 275 if choice:
275 276 return choice.values()[0]
276 277
277 278 raise error.UnknownCommand(cmd, allcmds)
278 279
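For illustration, a hedged example of the prefix matching above against a toy command table (real entries come from commands.table and are richer than plain strings):

# toy table; findpossible() never looks inside the values, so placeholders suffice
table = {'^status|st': 'status-entry', '^log|history': 'log-entry', 'locate': 'locate-entry'}
aliases, entry = findcmd('stat', table, strict=False)
# aliases == ['status', 'st'], entry == 'status-entry'
# findcmd('l', table, strict=False) raises error.AmbiguousCommand,
# since 'l' is a prefix of both 'log' and 'locate'
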
279 280 def findrepo(p):
280 281 while not os.path.isdir(os.path.join(p, ".hg")):
281 282 oldp, p = p, os.path.dirname(p)
282 283 if p == oldp:
283 284 return None
284 285
285 286 return p
286 287
287 288 def bailifchanged(repo, merge=True):
288 289 if merge and repo.dirstate.p2() != nullid:
289 290 raise util.Abort(_('outstanding uncommitted merge'))
290 291 modified, added, removed, deleted = repo.status()[:4]
291 292 if modified or added or removed or deleted:
292 293 raise util.Abort(_('uncommitted changes'))
293 294 ctx = repo[None]
294 295 for s in sorted(ctx.substate):
295 296 ctx.sub(s).bailifchanged()
296 297
297 298 def logmessage(ui, opts):
298 299 """ get the log message according to -m and -l option """
299 300 message = opts.get('message')
300 301 logfile = opts.get('logfile')
301 302
302 303 if message and logfile:
303 304 raise util.Abort(_('options --message and --logfile are mutually '
304 305 'exclusive'))
305 306 if not message and logfile:
306 307 try:
307 308 if logfile == '-':
308 309 message = ui.fin.read()
309 310 else:
310 311 message = '\n'.join(util.readfile(logfile).splitlines())
311 312 except IOError, inst:
312 313 raise util.Abort(_("can't read commit message '%s': %s") %
313 314 (logfile, inst.strerror))
314 315 return message
315 316
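A hedged illustration of the -m/-l handling above ('ui' is assumed; with '-l -' the message is read from ui.fin):

# illustrative only
# logmessage(ui, {'message': 'fix a bug', 'logfile': None})  -> 'fix a bug'
# logmessage(ui, {'message': None, 'logfile': 'msg.txt'})    -> contents of msg.txt
# supplying both aborts: options --message and --logfile are mutually exclusive
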
316 317 def mergeeditform(ctxorbool, baseformname):
317 318 """return appropriate editform name (referencing a committemplate)
318 319
319 320 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
320 321 a merge is being committed.
321 322
322 323 This returns baseformname with '.merge' appended if it is a merge,
323 324 otherwise '.normal' is appended.
324 325 """
325 326 if isinstance(ctxorbool, bool):
326 327 if ctxorbool:
327 328 return baseformname + ".merge"
328 329 elif 1 < len(ctxorbool.parents()):
329 330 return baseformname + ".merge"
330 331
331 332 return baseformname + ".normal"
332 333
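Illustrative examples of the suffixing above (the base name mirrors how the import code later in this file calls it):

assert mergeeditform(True, 'import.normal') == 'import.normal.merge'
assert mergeeditform(False, 'import.normal') == 'import.normal.normal'
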
333 334 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
334 335 editform='', **opts):
335 336 """get appropriate commit message editor according to '--edit' option
336 337
337 338 'finishdesc' is a function to be called with edited commit message
338 339 (= 'description' of the new changeset) just after editing, but
339 340 before checking empty-ness. It should return the actual text to be
340 341 stored into history. This allows the description to be changed
341 342 before it is stored.
342 343
343 344 'extramsg' is an extra message to be shown in the editor instead of
344 345 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
345 346 are added automatically.
346 347
347 348 'editform' is a dot-separated list of names, to distinguish
348 349 the purpose of commit text editing.
349 350
350 351 'getcommiteditor' returns 'commitforceeditor' regardless of
351 352 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
352 353 they are specific to usage in MQ.
353 354 """
354 355 if edit or finishdesc or extramsg:
355 356 return lambda r, c, s: commitforceeditor(r, c, s,
356 357 finishdesc=finishdesc,
357 358 extramsg=extramsg,
358 359 editform=editform)
359 360 elif editform:
360 361 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
361 362 else:
362 363 return commiteditor
363 364
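A hedged sketch of the usual calling pattern, matching the import code further down: resolve an editor callable once, then hand it to repo.commit():

# sketch only
# editform = mergeeditform(repo[None], 'import.normal')
# editor = getcommiteditor(editform=editform, **opts)
# n = repo.commit(message, user, date, match=m, editor=editor)
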
364 365 def loglimit(opts):
365 366 """get the log limit according to option -l/--limit"""
366 367 limit = opts.get('limit')
367 368 if limit:
368 369 try:
369 370 limit = int(limit)
370 371 except ValueError:
371 372 raise util.Abort(_('limit must be a positive integer'))
372 373 if limit <= 0:
373 374 raise util.Abort(_('limit must be positive'))
374 375 else:
375 376 limit = None
376 377 return limit
377 378
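Illustrative behaviour of the limit parsing above:

assert loglimit({'limit': '3'}) == 3
assert loglimit({'limit': ''}) is None      # no limit requested
# loglimit({'limit': '0'}) and loglimit({'limit': 'x'}) both raise util.Abort
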
378 379 def makefilename(repo, pat, node, desc=None,
379 380 total=None, seqno=None, revwidth=None, pathname=None):
380 381 node_expander = {
381 382 'H': lambda: hex(node),
382 383 'R': lambda: str(repo.changelog.rev(node)),
383 384 'h': lambda: short(node),
384 385 'm': lambda: re.sub('[^\w]', '_', str(desc))
385 386 }
386 387 expander = {
387 388 '%': lambda: '%',
388 389 'b': lambda: os.path.basename(repo.root),
389 390 }
390 391
391 392 try:
392 393 if node:
393 394 expander.update(node_expander)
394 395 if node:
395 396 expander['r'] = (lambda:
396 397 str(repo.changelog.rev(node)).zfill(revwidth or 0))
397 398 if total is not None:
398 399 expander['N'] = lambda: str(total)
399 400 if seqno is not None:
400 401 expander['n'] = lambda: str(seqno)
401 402 if total is not None and seqno is not None:
402 403 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
403 404 if pathname is not None:
404 405 expander['s'] = lambda: os.path.basename(pathname)
405 406 expander['d'] = lambda: os.path.dirname(pathname) or '.'
406 407 expander['p'] = lambda: pathname
407 408
408 409 newname = []
409 410 patlen = len(pat)
410 411 i = 0
411 412 while i < patlen:
412 413 c = pat[i]
413 414 if c == '%':
414 415 i += 1
415 416 c = pat[i]
416 417 c = expander[c]()
417 418 newname.append(c)
418 419 i += 1
419 420 return ''.join(newname)
420 421 except KeyError, inst:
421 422 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
422 423 inst.args[0])
423 424
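A hedged summary of the %-specifiers expanded above, plus a made-up example call (the return value shown is purely illustrative):

# %%  literal '%'         %H  full hash           %h  short hash
# %R  revision number     %r  zero-padded rev     %m  sanitized description
# %b  basename of repo    %N  number of patches   %n  zero-padded sequence number
# %s  basename of file    %d  dirname of file     %p  root-relative path
# e.g. makefilename(repo, 'hg-%h.patch', ctx.node(), seqno=1, total=3)
# might return something like 'hg-c2a4dfe2e4ab.patch'
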
424 425 def makefileobj(repo, pat, node=None, desc=None, total=None,
425 426 seqno=None, revwidth=None, mode='wb', modemap=None,
426 427 pathname=None):
427 428
428 429 writable = mode not in ('r', 'rb')
429 430
430 431 if not pat or pat == '-':
431 432 if writable:
432 433 fp = repo.ui.fout
433 434 else:
434 435 fp = repo.ui.fin
435 436 if util.safehasattr(fp, 'fileno'):
436 437 return os.fdopen(os.dup(fp.fileno()), mode)
437 438 else:
438 439 # if this fp can't be duped properly, return
439 440 # a dummy object that can be closed
440 441 class wrappedfileobj(object):
441 442 noop = lambda x: None
442 443 def __init__(self, f):
443 444 self.f = f
444 445 def __getattr__(self, attr):
445 446 if attr == 'close':
446 447 return self.noop
447 448 else:
448 449 return getattr(self.f, attr)
449 450
450 451 return wrappedfileobj(fp)
451 452 if util.safehasattr(pat, 'write') and writable:
452 453 return pat
453 454 if util.safehasattr(pat, 'read') and 'r' in mode:
454 455 return pat
455 456 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
456 457 if modemap is not None:
457 458 mode = modemap.get(fn, mode)
458 459 if mode == 'wb':
459 460 modemap[fn] = 'ab'
460 461 return open(fn, mode)
461 462
462 463 def openrevlog(repo, cmd, file_, opts):
463 464 """opens the changelog, manifest, a filelog or a given revlog"""
464 465 cl = opts['changelog']
465 466 mf = opts['manifest']
466 467 dir = opts['dir']
467 468 msg = None
468 469 if cl and mf:
469 470 msg = _('cannot specify --changelog and --manifest at the same time')
470 471 elif cl and dir:
471 472 msg = _('cannot specify --changelog and --dir at the same time')
472 473 elif cl or mf:
473 474 if file_:
474 475 msg = _('cannot specify filename with --changelog or --manifest')
475 476 elif not repo:
476 477 msg = _('cannot specify --changelog or --manifest or --dir '
477 478 'without a repository')
478 479 if msg:
479 480 raise util.Abort(msg)
480 481
481 482 r = None
482 483 if repo:
483 484 if cl:
484 485 r = repo.unfiltered().changelog
485 486 elif dir:
486 487 if 'treemanifest' not in repo.requirements:
487 488 raise util.Abort(_("--dir can only be used on repos with "
488 489 "treemanifest enabled"))
489 490 dirlog = repo.dirlog(file_)
490 491 if len(dirlog):
491 492 r = dirlog
492 493 elif mf:
493 494 r = repo.manifest
494 495 elif file_:
495 496 filelog = repo.file(file_)
496 497 if len(filelog):
497 498 r = filelog
498 499 if not r:
499 500 if not file_:
500 501 raise error.CommandError(cmd, _('invalid arguments'))
501 502 if not os.path.isfile(file_):
502 503 raise util.Abort(_("revlog '%s' not found") % file_)
503 504 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
504 505 file_[:-2] + ".i")
505 506 return r
506 507
507 508 def copy(ui, repo, pats, opts, rename=False):
508 509 # called with the repo lock held
509 510 #
510 511 # hgsep => pathname that uses "/" to separate directories
511 512 # ossep => pathname that uses os.sep to separate directories
512 513 cwd = repo.getcwd()
513 514 targets = {}
514 515 after = opts.get("after")
515 516 dryrun = opts.get("dry_run")
516 517 wctx = repo[None]
517 518
518 519 def walkpat(pat):
519 520 srcs = []
520 521 if after:
521 522 badstates = '?'
522 523 else:
523 524 badstates = '?r'
524 525 m = scmutil.match(repo[None], [pat], opts, globbed=True)
525 526 for abs in repo.walk(m):
526 527 state = repo.dirstate[abs]
527 528 rel = m.rel(abs)
528 529 exact = m.exact(abs)
529 530 if state in badstates:
530 531 if exact and state == '?':
531 532 ui.warn(_('%s: not copying - file is not managed\n') % rel)
532 533 if exact and state == 'r':
533 534 ui.warn(_('%s: not copying - file has been marked for'
534 535 ' remove\n') % rel)
535 536 continue
536 537 # abs: hgsep
537 538 # rel: ossep
538 539 srcs.append((abs, rel, exact))
539 540 return srcs
540 541
541 542 # abssrc: hgsep
542 543 # relsrc: ossep
543 544 # otarget: ossep
544 545 def copyfile(abssrc, relsrc, otarget, exact):
545 546 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
546 547 if '/' in abstarget:
547 548 # We cannot normalize abstarget itself, this would prevent
548 549 # case only renames, like a => A.
549 550 abspath, absname = abstarget.rsplit('/', 1)
550 551 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
551 552 reltarget = repo.pathto(abstarget, cwd)
552 553 target = repo.wjoin(abstarget)
553 554 src = repo.wjoin(abssrc)
554 555 state = repo.dirstate[abstarget]
555 556
556 557 scmutil.checkportable(ui, abstarget)
557 558
558 559 # check for collisions
559 560 prevsrc = targets.get(abstarget)
560 561 if prevsrc is not None:
561 562 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
562 563 (reltarget, repo.pathto(abssrc, cwd),
563 564 repo.pathto(prevsrc, cwd)))
564 565 return
565 566
566 567 # check for overwrites
567 568 exists = os.path.lexists(target)
568 569 samefile = False
569 570 if exists and abssrc != abstarget:
570 571 if (repo.dirstate.normalize(abssrc) ==
571 572 repo.dirstate.normalize(abstarget)):
572 573 if not rename:
573 574 ui.warn(_("%s: can't copy - same file\n") % reltarget)
574 575 return
575 576 exists = False
576 577 samefile = True
577 578
578 579 if not after and exists or after and state in 'mn':
579 580 if not opts['force']:
580 581 ui.warn(_('%s: not overwriting - file exists\n') %
581 582 reltarget)
582 583 return
583 584
584 585 if after:
585 586 if not exists:
586 587 if rename:
587 588 ui.warn(_('%s: not recording move - %s does not exist\n') %
588 589 (relsrc, reltarget))
589 590 else:
590 591 ui.warn(_('%s: not recording copy - %s does not exist\n') %
591 592 (relsrc, reltarget))
592 593 return
593 594 elif not dryrun:
594 595 try:
595 596 if exists:
596 597 os.unlink(target)
597 598 targetdir = os.path.dirname(target) or '.'
598 599 if not os.path.isdir(targetdir):
599 600 os.makedirs(targetdir)
600 601 if samefile:
601 602 tmp = target + "~hgrename"
602 603 os.rename(src, tmp)
603 604 os.rename(tmp, target)
604 605 else:
605 606 util.copyfile(src, target)
606 607 srcexists = True
607 608 except IOError, inst:
608 609 if inst.errno == errno.ENOENT:
609 610 ui.warn(_('%s: deleted in working directory\n') % relsrc)
610 611 srcexists = False
611 612 else:
612 613 ui.warn(_('%s: cannot copy - %s\n') %
613 614 (relsrc, inst.strerror))
614 615 return True # report a failure
615 616
616 617 if ui.verbose or not exact:
617 618 if rename:
618 619 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
619 620 else:
620 621 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
621 622
622 623 targets[abstarget] = abssrc
623 624
624 625 # fix up dirstate
625 626 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
626 627 dryrun=dryrun, cwd=cwd)
627 628 if rename and not dryrun:
628 629 if not after and srcexists and not samefile:
629 630 util.unlinkpath(repo.wjoin(abssrc))
630 631 wctx.forget([abssrc])
631 632
632 633 # pat: ossep
633 634 # dest ossep
634 635 # srcs: list of (hgsep, hgsep, ossep, bool)
635 636 # return: function that takes hgsep and returns ossep
636 637 def targetpathfn(pat, dest, srcs):
637 638 if os.path.isdir(pat):
638 639 abspfx = pathutil.canonpath(repo.root, cwd, pat)
639 640 abspfx = util.localpath(abspfx)
640 641 if destdirexists:
641 642 striplen = len(os.path.split(abspfx)[0])
642 643 else:
643 644 striplen = len(abspfx)
644 645 if striplen:
645 646 striplen += len(os.sep)
646 647 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
647 648 elif destdirexists:
648 649 res = lambda p: os.path.join(dest,
649 650 os.path.basename(util.localpath(p)))
650 651 else:
651 652 res = lambda p: dest
652 653 return res
653 654
654 655 # pat: ossep
655 656 # dest ossep
656 657 # srcs: list of (hgsep, hgsep, ossep, bool)
657 658 # return: function that takes hgsep and returns ossep
658 659 def targetpathafterfn(pat, dest, srcs):
659 660 if matchmod.patkind(pat):
660 661 # a mercurial pattern
661 662 res = lambda p: os.path.join(dest,
662 663 os.path.basename(util.localpath(p)))
663 664 else:
664 665 abspfx = pathutil.canonpath(repo.root, cwd, pat)
665 666 if len(abspfx) < len(srcs[0][0]):
666 667 # A directory. Either the target path contains the last
667 668 # component of the source path or it does not.
668 669 def evalpath(striplen):
669 670 score = 0
670 671 for s in srcs:
671 672 t = os.path.join(dest, util.localpath(s[0])[striplen:])
672 673 if os.path.lexists(t):
673 674 score += 1
674 675 return score
675 676
676 677 abspfx = util.localpath(abspfx)
677 678 striplen = len(abspfx)
678 679 if striplen:
679 680 striplen += len(os.sep)
680 681 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
681 682 score = evalpath(striplen)
682 683 striplen1 = len(os.path.split(abspfx)[0])
683 684 if striplen1:
684 685 striplen1 += len(os.sep)
685 686 if evalpath(striplen1) > score:
686 687 striplen = striplen1
687 688 res = lambda p: os.path.join(dest,
688 689 util.localpath(p)[striplen:])
689 690 else:
690 691 # a file
691 692 if destdirexists:
692 693 res = lambda p: os.path.join(dest,
693 694 os.path.basename(util.localpath(p)))
694 695 else:
695 696 res = lambda p: dest
696 697 return res
697 698
698 699 pats = scmutil.expandpats(pats)
699 700 if not pats:
700 701 raise util.Abort(_('no source or destination specified'))
701 702 if len(pats) == 1:
702 703 raise util.Abort(_('no destination specified'))
703 704 dest = pats.pop()
704 705 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
705 706 if not destdirexists:
706 707 if len(pats) > 1 or matchmod.patkind(pats[0]):
707 708 raise util.Abort(_('with multiple sources, destination must be an '
708 709 'existing directory'))
709 710 if util.endswithsep(dest):
710 711 raise util.Abort(_('destination %s is not a directory') % dest)
711 712
712 713 tfn = targetpathfn
713 714 if after:
714 715 tfn = targetpathafterfn
715 716 copylist = []
716 717 for pat in pats:
717 718 srcs = walkpat(pat)
718 719 if not srcs:
719 720 continue
720 721 copylist.append((tfn(pat, dest, srcs), srcs))
721 722 if not copylist:
722 723 raise util.Abort(_('no files to copy'))
723 724
724 725 errors = 0
725 726 for targetpath, srcs in copylist:
726 727 for abssrc, relsrc, exact in srcs:
727 728 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
728 729 errors += 1
729 730
730 731 if errors:
731 732 ui.warn(_('(consider using --after)\n'))
732 733
733 734 return errors != 0
734 735
735 736 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
736 737 runargs=None, appendpid=False):
737 738 '''Run a command as a service.'''
738 739
739 740 def writepid(pid):
740 741 if opts['pid_file']:
741 742 if appendpid:
742 743 mode = 'a'
743 744 else:
744 745 mode = 'w'
745 746 fp = open(opts['pid_file'], mode)
746 747 fp.write(str(pid) + '\n')
747 748 fp.close()
748 749
749 750 if opts['daemon'] and not opts['daemon_pipefds']:
750 751 # Signal child process startup with file removal
751 752 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
752 753 os.close(lockfd)
753 754 try:
754 755 if not runargs:
755 756 runargs = util.hgcmd() + sys.argv[1:]
756 757 runargs.append('--daemon-pipefds=%s' % lockpath)
757 758 # Don't pass --cwd to the child process, because we've already
758 759 # changed directory.
759 760 for i in xrange(1, len(runargs)):
760 761 if runargs[i].startswith('--cwd='):
761 762 del runargs[i]
762 763 break
763 764 elif runargs[i].startswith('--cwd'):
764 765 del runargs[i:i + 2]
765 766 break
766 767 def condfn():
767 768 return not os.path.exists(lockpath)
768 769 pid = util.rundetached(runargs, condfn)
769 770 if pid < 0:
770 771 raise util.Abort(_('child process failed to start'))
771 772 writepid(pid)
772 773 finally:
773 774 try:
774 775 os.unlink(lockpath)
775 776 except OSError, e:
776 777 if e.errno != errno.ENOENT:
777 778 raise
778 779 if parentfn:
779 780 return parentfn(pid)
780 781 else:
781 782 return
782 783
783 784 if initfn:
784 785 initfn()
785 786
786 787 if not opts['daemon']:
787 788 writepid(os.getpid())
788 789
789 790 if opts['daemon_pipefds']:
790 791 lockpath = opts['daemon_pipefds']
791 792 try:
792 793 os.setsid()
793 794 except AttributeError:
794 795 pass
795 796 os.unlink(lockpath)
796 797 util.hidewindow()
797 798 sys.stdout.flush()
798 799 sys.stderr.flush()
799 800
800 801 nullfd = os.open(os.devnull, os.O_RDWR)
801 802 logfilefd = nullfd
802 803 if logfile:
803 804 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
804 805 os.dup2(nullfd, 0)
805 806 os.dup2(logfilefd, 1)
806 807 os.dup2(logfilefd, 2)
807 808 if nullfd not in (0, 1, 2):
808 809 os.close(nullfd)
809 810 if logfile and logfilefd not in (0, 1, 2):
810 811 os.close(logfilefd)
811 812
812 813 if runfn:
813 814 return runfn()
814 815
815 816 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
816 817 """Utility function used by commands.import to import a single patch
817 818
818 819 This function is explicitly defined here to help the evolve extension to
819 820 wrap this part of the import logic.
820 821
821 822 The API is currently a bit ugly because it is a simple code translation from
822 823 the import command. Feel free to make it better.
823 824
824 825 :hunk: a patch (as a binary string)
825 826 :parents: nodes that will be parents of the created commit
826 827 :opts: the full dict of options passed to the import command
827 828 :msgs: list to save the commit message to.
828 829 (used in case we need to save it when failing)
829 830 :updatefunc: a function that updates a repo to a given node
830 831 updatefunc(<repo>, <node>)
831 832 """
832 833 tmpname, message, user, date, branch, nodeid, p1, p2 = \
833 834 patch.extract(ui, hunk)
834 835
835 836 update = not opts.get('bypass')
836 837 strip = opts["strip"]
837 838 prefix = opts["prefix"]
838 839 sim = float(opts.get('similarity') or 0)
839 840 if not tmpname:
840 841 return (None, None, False)
841 842 msg = _('applied to working directory')
842 843
843 844 rejects = False
844 845 dsguard = None
845 846
846 847 try:
847 848 cmdline_message = logmessage(ui, opts)
848 849 if cmdline_message:
849 850 # pickup the cmdline msg
850 851 message = cmdline_message
851 852 elif message:
852 853 # pickup the patch msg
853 854 message = message.strip()
854 855 else:
855 856 # launch the editor
856 857 message = None
857 858 ui.debug('message:\n%s\n' % message)
858 859
859 860 if len(parents) == 1:
860 861 parents.append(repo[nullid])
861 862 if opts.get('exact'):
862 863 if not nodeid or not p1:
863 864 raise util.Abort(_('not a Mercurial patch'))
864 865 p1 = repo[p1]
865 866 p2 = repo[p2 or nullid]
866 867 elif p2:
867 868 try:
868 869 p1 = repo[p1]
869 870 p2 = repo[p2]
870 871 # Without any options, consider p2 only if the
871 872 # patch is being applied on top of the recorded
872 873 # first parent.
873 874 if p1 != parents[0]:
874 875 p1 = parents[0]
875 876 p2 = repo[nullid]
876 877 except error.RepoError:
877 878 p1, p2 = parents
878 879 if p2.node() == nullid:
879 880 ui.warn(_("warning: import the patch as a normal revision\n"
880 881 "(use --exact to import the patch as a merge)\n"))
881 882 else:
882 883 p1, p2 = parents
883 884
884 885 n = None
885 886 if update:
886 887 dsguard = dirstateguard(repo, 'tryimportone')
887 888 if p1 != parents[0]:
888 889 updatefunc(repo, p1.node())
889 890 if p2 != parents[1]:
890 891 repo.setparents(p1.node(), p2.node())
891 892
892 893 if opts.get('exact') or opts.get('import_branch'):
893 894 repo.dirstate.setbranch(branch or 'default')
894 895
895 896 partial = opts.get('partial', False)
896 897 files = set()
897 898 try:
898 899 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
899 900 files=files, eolmode=None, similarity=sim / 100.0)
900 901 except patch.PatchError, e:
901 902 if not partial:
902 903 raise util.Abort(str(e))
903 904 if partial:
904 905 rejects = True
905 906
906 907 files = list(files)
907 908 if opts.get('no_commit'):
908 909 if message:
909 910 msgs.append(message)
910 911 else:
911 912 if opts.get('exact') or p2:
912 913 # If you got here, you either use --force and know what
913 914 # you are doing or used --exact or a merge patch while
914 915 # being updated to its first parent.
915 916 m = None
916 917 else:
917 918 m = scmutil.matchfiles(repo, files or [])
918 919 editform = mergeeditform(repo[None], 'import.normal')
919 920 if opts.get('exact'):
920 921 editor = None
921 922 else:
922 923 editor = getcommiteditor(editform=editform, **opts)
923 924 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
924 925 try:
925 926 if partial:
926 927 repo.ui.setconfig('ui', 'allowemptycommit', True)
927 928 n = repo.commit(message, opts.get('user') or user,
928 929 opts.get('date') or date, match=m,
929 930 editor=editor)
930 931 finally:
931 932 repo.ui.restoreconfig(allowemptyback)
932 933 dsguard.close()
933 934 else:
934 935 if opts.get('exact') or opts.get('import_branch'):
935 936 branch = branch or 'default'
936 937 else:
937 938 branch = p1.branch()
938 939 store = patch.filestore()
939 940 try:
940 941 files = set()
941 942 try:
942 943 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
943 944 files, eolmode=None)
944 945 except patch.PatchError, e:
945 946 raise util.Abort(str(e))
946 947 if opts.get('exact'):
947 948 editor = None
948 949 else:
949 950 editor = getcommiteditor(editform='import.bypass')
950 951 memctx = context.makememctx(repo, (p1.node(), p2.node()),
951 952 message,
952 953 opts.get('user') or user,
953 954 opts.get('date') or date,
954 955 branch, files, store,
955 956 editor=editor)
956 957 n = memctx.commit()
957 958 finally:
958 959 store.close()
959 960 if opts.get('exact') and opts.get('no_commit'):
960 961 # --exact with --no-commit is still useful in that it does merge
961 962 # and branch bits
962 963 ui.warn(_("warning: can't check exact import with --no-commit\n"))
963 964 elif opts.get('exact') and hex(n) != nodeid:
964 965 raise util.Abort(_('patch is damaged or loses information'))
965 966 if n:
966 967 # i18n: refers to a short changeset id
967 968 msg = _('created %s') % short(n)
968 969 return (msg, n, rejects)
969 970 finally:
970 971 lockmod.release(dsguard)
971 972 os.unlink(tmpname)
972 973
973 974 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
974 975 opts=None):
975 976 '''export changesets as hg patches.'''
976 977
977 978 total = len(revs)
978 979 revwidth = max([len(str(rev)) for rev in revs])
979 980 filemode = {}
980 981
981 982 def single(rev, seqno, fp):
982 983 ctx = repo[rev]
983 984 node = ctx.node()
984 985 parents = [p.node() for p in ctx.parents() if p]
985 986 branch = ctx.branch()
986 987 if switch_parent:
987 988 parents.reverse()
988 989
989 990 if parents:
990 991 prev = parents[0]
991 992 else:
992 993 prev = nullid
993 994
994 995 shouldclose = False
995 996 if not fp and len(template) > 0:
996 997 desc_lines = ctx.description().rstrip().split('\n')
997 998 desc = desc_lines[0] #Commit always has a first line.
998 999 fp = makefileobj(repo, template, node, desc=desc, total=total,
999 1000 seqno=seqno, revwidth=revwidth, mode='wb',
1000 1001 modemap=filemode)
1001 1002 if fp != template:
1002 1003 shouldclose = True
1003 1004 if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
1004 1005 repo.ui.note("%s\n" % fp.name)
1005 1006
1006 1007 if not fp:
1007 1008 write = repo.ui.write
1008 1009 else:
1009 1010 def write(s, **kw):
1010 1011 fp.write(s)
1011 1012
1012 1013 write("# HG changeset patch\n")
1013 1014 write("# User %s\n" % ctx.user())
1014 1015 write("# Date %d %d\n" % ctx.date())
1015 1016 write("# %s\n" % util.datestr(ctx.date()))
1016 1017 if branch and branch != 'default':
1017 1018 write("# Branch %s\n" % branch)
1018 1019 write("# Node ID %s\n" % hex(node))
1019 1020 write("# Parent %s\n" % hex(prev))
1020 1021 if len(parents) > 1:
1021 1022 write("# Parent %s\n" % hex(parents[1]))
1022 1023 write(ctx.description().rstrip())
1023 1024 write("\n\n")
1024 1025
1025 1026 for chunk, label in patch.diffui(repo, prev, node, opts=opts):
1026 1027 write(chunk, label=label)
1027 1028
1028 1029 if shouldclose:
1029 1030 fp.close()
1030 1031
1031 1032 for seqno, rev in enumerate(revs):
1032 1033 single(rev, seqno + 1, fp)
1033 1034
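A hedged sketch of calling export() directly; the opts argument is assumed to be a diff-options object such as patch.diffallopts(ui) returns:

# sketch only -- one patch file per revision, named via the %-specifiers above
# export(repo, [repo['tip'].rev()], template='hg-%h.patch',
#        opts=patch.diffallopts(ui))
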
1034 1035 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1035 1036 changes=None, stat=False, fp=None, prefix='',
1036 1037 root='', listsubrepos=False):
1037 1038 '''show diff or diffstat.'''
1038 1039 if fp is None:
1039 1040 write = ui.write
1040 1041 else:
1041 1042 def write(s, **kw):
1042 1043 fp.write(s)
1043 1044
1044 1045 if root:
1045 1046 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1046 1047 else:
1047 1048 relroot = ''
1048 1049 if relroot != '':
1049 1050 # XXX relative roots currently don't work if the root is within a
1050 1051 # subrepo
1051 1052 uirelroot = match.uipath(relroot)
1052 1053 relroot += '/'
1053 1054 for matchroot in match.files():
1054 1055 if not matchroot.startswith(relroot):
1055 1056 ui.warn(_('warning: %s not inside relative root %s\n') % (
1056 1057 match.uipath(matchroot), uirelroot))
1057 1058
1058 1059 if stat:
1059 1060 diffopts = diffopts.copy(context=0)
1060 1061 width = 80
1061 1062 if not ui.plain():
1062 1063 width = ui.termwidth()
1063 1064 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1064 1065 prefix=prefix, relroot=relroot)
1065 1066 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1066 1067 width=width,
1067 1068 git=diffopts.git):
1068 1069 write(chunk, label=label)
1069 1070 else:
1070 1071 for chunk, label in patch.diffui(repo, node1, node2, match,
1071 1072 changes, diffopts, prefix=prefix,
1072 1073 relroot=relroot):
1073 1074 write(chunk, label=label)
1074 1075
1075 1076 if listsubrepos:
1076 1077 ctx1 = repo[node1]
1077 1078 ctx2 = repo[node2]
1078 1079 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1079 1080 tempnode2 = node2
1080 1081 try:
1081 1082 if node2 is not None:
1082 1083 tempnode2 = ctx2.substate[subpath][1]
1083 1084 except KeyError:
1084 1085 # A subrepo that existed in node1 was deleted between node1 and
1085 1086 # node2 (inclusive). Thus, ctx2's substate won't contain that
1086 1087 # subpath. The best we can do is to ignore it.
1087 1088 tempnode2 = None
1088 1089 submatch = matchmod.narrowmatcher(subpath, match)
1089 1090 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1090 1091 stat=stat, fp=fp, prefix=prefix)
1091 1092
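A hedged sketch of the call pattern, mirroring how the changeset printers below use it (diffstat between a changeset and its first parent):

# sketch only
# diffopts = patch.diffallopts(ui)
# diffordiffstat(ui, repo, diffopts, repo['.'].p1().node(), repo['.'].node(),
#                scmutil.matchall(repo), stat=True)
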
1092 1093 class changeset_printer(object):
1093 1094 '''show changeset information when templating is not requested.'''
1094 1095
1095 1096 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1096 1097 self.ui = ui
1097 1098 self.repo = repo
1098 1099 self.buffered = buffered
1099 1100 self.matchfn = matchfn
1100 1101 self.diffopts = diffopts
1101 1102 self.header = {}
1102 1103 self.hunk = {}
1103 1104 self.lastheader = None
1104 1105 self.footer = None
1105 1106
1106 1107 def flush(self, rev):
1107 1108 if rev in self.header:
1108 1109 h = self.header[rev]
1109 1110 if h != self.lastheader:
1110 1111 self.lastheader = h
1111 1112 self.ui.write(h)
1112 1113 del self.header[rev]
1113 1114 if rev in self.hunk:
1114 1115 self.ui.write(self.hunk[rev])
1115 1116 del self.hunk[rev]
1116 1117 return 1
1117 1118 return 0
1118 1119
1119 1120 def close(self):
1120 1121 if self.footer:
1121 1122 self.ui.write(self.footer)
1122 1123
1123 1124 def show(self, ctx, copies=None, matchfn=None, **props):
1124 1125 if self.buffered:
1125 1126 self.ui.pushbuffer()
1126 1127 self._show(ctx, copies, matchfn, props)
1127 1128 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
1128 1129 else:
1129 1130 self._show(ctx, copies, matchfn, props)
1130 1131
1131 1132 def _show(self, ctx, copies, matchfn, props):
1132 1133 '''show a single changeset or file revision'''
1133 1134 changenode = ctx.node()
1134 1135 rev = ctx.rev()
1135 1136 if self.ui.debugflag:
1136 1137 hexfunc = hex
1137 1138 else:
1138 1139 hexfunc = short
1139 1140 if rev is None:
1140 1141 pctx = ctx.p1()
1141 1142 revnode = (pctx.rev(), hexfunc(pctx.node()) + '+')
1142 1143 else:
1143 1144 revnode = (rev, hexfunc(changenode))
1144 1145
1145 1146 if self.ui.quiet:
1146 1147 self.ui.write("%d:%s\n" % revnode, label='log.node')
1147 1148 return
1148 1149
1149 1150 date = util.datestr(ctx.date())
1150 1151
1151 1152 # i18n: column positioning for "hg log"
1152 1153 self.ui.write(_("changeset: %d:%s\n") % revnode,
1153 1154 label='log.changeset changeset.%s' % ctx.phasestr())
1154 1155
1155 1156 # branches are shown first before any other names due to backwards
1156 1157 # compatibility
1157 1158 branch = ctx.branch()
1158 1159 # don't show the default branch name
1159 1160 if branch != 'default':
1160 1161 # i18n: column positioning for "hg log"
1161 1162 self.ui.write(_("branch: %s\n") % branch,
1162 1163 label='log.branch')
1163 1164
1164 1165 for name, ns in self.repo.names.iteritems():
1165 1166 # branches has special logic already handled above, so here we just
1166 1167 # skip it
1167 1168 if name == 'branches':
1168 1169 continue
1169 1170 # we will use the templatename as the color name since those two
1170 1171 # should be the same
1171 1172 for name in ns.names(self.repo, changenode):
1172 1173 self.ui.write(ns.logfmt % name,
1173 1174 label='log.%s' % ns.colorname)
1174 1175 if self.ui.debugflag:
1175 1176 # i18n: column positioning for "hg log"
1176 1177 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1177 1178 label='log.phase')
1178 1179 for pctx in self._meaningful_parentrevs(ctx):
1179 1180 label = 'log.parent changeset.%s' % pctx.phasestr()
1180 1181 # i18n: column positioning for "hg log"
1181 1182 self.ui.write(_("parent: %d:%s\n")
1182 1183 % (pctx.rev(), hexfunc(pctx.node())),
1183 1184 label=label)
1184 1185
1185 1186 if self.ui.debugflag and rev is not None:
1186 1187 mnode = ctx.manifestnode()
1187 1188 # i18n: column positioning for "hg log"
1188 1189 self.ui.write(_("manifest: %d:%s\n") %
1189 1190 (self.repo.manifest.rev(mnode), hex(mnode)),
1190 1191 label='ui.debug log.manifest')
1191 1192 # i18n: column positioning for "hg log"
1192 1193 self.ui.write(_("user: %s\n") % ctx.user(),
1193 1194 label='log.user')
1194 1195 # i18n: column positioning for "hg log"
1195 1196 self.ui.write(_("date: %s\n") % date,
1196 1197 label='log.date')
1197 1198
1198 1199 if self.ui.debugflag:
1199 1200 files = ctx.p1().status(ctx)[:3]
1200 1201 for key, value in zip([# i18n: column positioning for "hg log"
1201 1202 _("files:"),
1202 1203 # i18n: column positioning for "hg log"
1203 1204 _("files+:"),
1204 1205 # i18n: column positioning for "hg log"
1205 1206 _("files-:")], files):
1206 1207 if value:
1207 1208 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1208 1209 label='ui.debug log.files')
1209 1210 elif ctx.files() and self.ui.verbose:
1210 1211 # i18n: column positioning for "hg log"
1211 1212 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1212 1213 label='ui.note log.files')
1213 1214 if copies and self.ui.verbose:
1214 1215 copies = ['%s (%s)' % c for c in copies]
1215 1216 # i18n: column positioning for "hg log"
1216 1217 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1217 1218 label='ui.note log.copies')
1218 1219
1219 1220 extra = ctx.extra()
1220 1221 if extra and self.ui.debugflag:
1221 1222 for key, value in sorted(extra.items()):
1222 1223 # i18n: column positioning for "hg log"
1223 1224 self.ui.write(_("extra: %s=%s\n")
1224 1225 % (key, value.encode('string_escape')),
1225 1226 label='ui.debug log.extra')
1226 1227
1227 1228 description = ctx.description().strip()
1228 1229 if description:
1229 1230 if self.ui.verbose:
1230 1231 self.ui.write(_("description:\n"),
1231 1232 label='ui.note log.description')
1232 1233 self.ui.write(description,
1233 1234 label='ui.note log.description')
1234 1235 self.ui.write("\n\n")
1235 1236 else:
1236 1237 # i18n: column positioning for "hg log"
1237 1238 self.ui.write(_("summary: %s\n") %
1238 1239 description.splitlines()[0],
1239 1240 label='log.summary')
1240 1241 self.ui.write("\n")
1241 1242
1242 1243 self.showpatch(changenode, matchfn)
1243 1244
1244 1245 def showpatch(self, node, matchfn):
1245 1246 if not matchfn:
1246 1247 matchfn = self.matchfn
1247 1248 if matchfn:
1248 1249 stat = self.diffopts.get('stat')
1249 1250 diff = self.diffopts.get('patch')
1250 1251 diffopts = patch.diffallopts(self.ui, self.diffopts)
1251 1252 prev = self.repo.changelog.parents(node)[0]
1252 1253 if stat:
1253 1254 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1254 1255 match=matchfn, stat=True)
1255 1256 if diff:
1256 1257 if stat:
1257 1258 self.ui.write("\n")
1258 1259 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1259 1260 match=matchfn, stat=False)
1260 1261 self.ui.write("\n")
1261 1262
1262 1263 def _meaningful_parentrevs(self, ctx):
1263 1264 """Return list of meaningful (or all if debug) parentrevs for rev.
1264 1265
1265 1266 For merges (two non-nullrev revisions) both parents are meaningful.
1266 1267 Otherwise the first parent revision is considered meaningful if it
1267 1268 is not the preceding revision.
1268 1269 """
1269 1270 parents = ctx.parents()
1270 1271 if len(parents) > 1:
1271 1272 return parents
1272 1273 if self.ui.debugflag:
1273 1274 return [parents[0], self.repo['null']]
1274 1275 if parents[0].rev() >= scmutil.intrev(self.repo, ctx.rev()) - 1:
1275 1276 return []
1276 1277 return parents
1277 1278
1278 1279 class jsonchangeset(changeset_printer):
1279 1280 '''format changeset information.'''
1280 1281
1281 1282 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1282 1283 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1283 1284 self.cache = {}
1284 1285 self._first = True
1285 1286
1286 1287 def close(self):
1287 1288 if not self._first:
1288 1289 self.ui.write("\n]\n")
1289 1290 else:
1290 1291 self.ui.write("[]\n")
1291 1292
1292 1293 def _show(self, ctx, copies, matchfn, props):
1293 1294 '''show a single changeset or file revision'''
1294 1295 rev = ctx.rev()
1295 1296 if rev is None:
1296 1297 jrev = jnode = 'null'
1297 1298 else:
1298 1299 jrev = str(rev)
1299 1300 jnode = '"%s"' % hex(ctx.node())
1300 1301 j = encoding.jsonescape
1301 1302
1302 1303 if self._first:
1303 1304 self.ui.write("[\n {")
1304 1305 self._first = False
1305 1306 else:
1306 1307 self.ui.write(",\n {")
1307 1308
1308 1309 if self.ui.quiet:
1309 1310 self.ui.write('\n "rev": %s' % jrev)
1310 1311 self.ui.write(',\n "node": %s' % jnode)
1311 1312 self.ui.write('\n }')
1312 1313 return
1313 1314
1314 1315 self.ui.write('\n "rev": %s' % jrev)
1315 1316 self.ui.write(',\n "node": %s' % jnode)
1316 1317 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1317 1318 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1318 1319 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1319 1320 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1320 1321 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1321 1322
1322 1323 self.ui.write(',\n "bookmarks": [%s]' %
1323 1324 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1324 1325 self.ui.write(',\n "tags": [%s]' %
1325 1326 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1326 1327 self.ui.write(',\n "parents": [%s]' %
1327 1328 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1328 1329
1329 1330 if self.ui.debugflag:
1330 1331 if rev is None:
1331 1332 jmanifestnode = 'null'
1332 1333 else:
1333 1334 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1334 1335 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1335 1336
1336 1337 self.ui.write(',\n "extra": {%s}' %
1337 1338 ", ".join('"%s": "%s"' % (j(k), j(v))
1338 1339 for k, v in ctx.extra().items()))
1339 1340
1340 1341 files = ctx.p1().status(ctx)
1341 1342 self.ui.write(',\n "modified": [%s]' %
1342 1343 ", ".join('"%s"' % j(f) for f in files[0]))
1343 1344 self.ui.write(',\n "added": [%s]' %
1344 1345 ", ".join('"%s"' % j(f) for f in files[1]))
1345 1346 self.ui.write(',\n "removed": [%s]' %
1346 1347 ", ".join('"%s"' % j(f) for f in files[2]))
1347 1348
1348 1349 elif self.ui.verbose:
1349 1350 self.ui.write(',\n "files": [%s]' %
1350 1351 ", ".join('"%s"' % j(f) for f in ctx.files()))
1351 1352
1352 1353 if copies:
1353 1354 self.ui.write(',\n "copies": {%s}' %
1354 1355 ", ".join('"%s": "%s"' % (j(k), j(v))
1355 1356 for k, v in copies))
1356 1357
1357 1358 matchfn = self.matchfn
1358 1359 if matchfn:
1359 1360 stat = self.diffopts.get('stat')
1360 1361 diff = self.diffopts.get('patch')
1361 1362 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1362 1363 node, prev = ctx.node(), ctx.p1().node()
1363 1364 if stat:
1364 1365 self.ui.pushbuffer()
1365 1366 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1366 1367 match=matchfn, stat=True)
1367 1368 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1368 1369 if diff:
1369 1370 self.ui.pushbuffer()
1370 1371 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1371 1372 match=matchfn, stat=False)
1372 1373 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1373 1374
1374 1375 self.ui.write("\n }")
1375 1376
1376 1377 class changeset_templater(changeset_printer):
1377 1378 '''format changeset information.'''
1378 1379
1379 1380 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1380 1381 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1381 1382 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1382 1383 defaulttempl = {
1383 1384 'parent': '{rev}:{node|formatnode} ',
1384 1385 'manifest': '{rev}:{node|formatnode}',
1385 1386 'file_copy': '{name} ({source})',
1386 1387 'extra': '{key}={value|stringescape}'
1387 1388 }
1388 1389 # filecopy is preserved for compatibility reasons
1389 1390 defaulttempl['filecopy'] = defaulttempl['file_copy']
1390 1391 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1391 1392 cache=defaulttempl)
1392 1393 if tmpl:
1393 1394 self.t.cache['changeset'] = tmpl
1394 1395
1395 1396 self.cache = {}
1396 1397
1397 1398 def _show(self, ctx, copies, matchfn, props):
1398 1399 '''show a single changeset or file revision'''
1399 1400
1400 1401 showlist = templatekw.showlist
1401 1402
1402 1403 # showparents() behaviour depends on ui trace level which
1403 1404 # causes unexpected behaviours at the templating level and makes
1404 1405 # it harder to extract into a standalone function. Its
1405 1406 # behaviour cannot be changed so leave it here for now.
1406 1407 def showparents(**args):
1407 1408 ctx = args['ctx']
1408 1409 parents = [[('rev', p.rev()),
1409 1410 ('node', p.hex()),
1410 1411 ('phase', p.phasestr())]
1411 1412 for p in self._meaningful_parentrevs(ctx)]
1412 1413 return showlist('parent', parents, **args)
1413 1414
1414 1415 props = props.copy()
1415 1416 props.update(templatekw.keywords)
1416 1417 props['parents'] = showparents
1417 1418 props['templ'] = self.t
1418 1419 props['ctx'] = ctx
1419 1420 props['repo'] = self.repo
1420 1421 props['revcache'] = {'copies': copies}
1421 1422 props['cache'] = self.cache
1422 1423
1423 1424 # find correct templates for current mode
1424 1425
1425 1426 tmplmodes = [
1426 1427 (True, None),
1427 1428 (self.ui.verbose, 'verbose'),
1428 1429 (self.ui.quiet, 'quiet'),
1429 1430 (self.ui.debugflag, 'debug'),
1430 1431 ]
1431 1432
1432 1433 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
1433 1434 for mode, postfix in tmplmodes:
1434 1435 for type in types:
1435 1436 cur = postfix and ('%s_%s' % (type, postfix)) or type
1436 1437 if mode and cur in self.t:
1437 1438 types[type] = cur
1438 1439
1439 1440 try:
1440 1441
1441 1442 # write header
1442 1443 if types['header']:
1443 1444 h = templater.stringify(self.t(types['header'], **props))
1444 1445 if self.buffered:
1445 1446 self.header[ctx.rev()] = h
1446 1447 else:
1447 1448 if self.lastheader != h:
1448 1449 self.lastheader = h
1449 1450 self.ui.write(h)
1450 1451
1451 1452 # write changeset metadata, then patch if requested
1452 1453 key = types['changeset']
1453 1454 self.ui.write(templater.stringify(self.t(key, **props)))
1454 1455 self.showpatch(ctx.node(), matchfn)
1455 1456
1456 1457 if types['footer']:
1457 1458 if not self.footer:
1458 1459 self.footer = templater.stringify(self.t(types['footer'],
1459 1460 **props))
1460 1461
1461 1462 except KeyError, inst:
1462 1463 msg = _("%s: no key named '%s'")
1463 1464 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1464 1465 except SyntaxError, inst:
1465 1466 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1466 1467
1467 1468 def gettemplate(ui, tmpl, style):
1468 1469 """
1469 1470 Find the template matching the given template spec or style.
1470 1471 """
1471 1472
1472 1473 # ui settings
1473 1474 if not tmpl and not style: # templates are stronger than styles
1474 1475 tmpl = ui.config('ui', 'logtemplate')
1475 1476 if tmpl:
1476 1477 try:
1477 1478 tmpl = templater.unquotestring(tmpl)
1478 1479 except SyntaxError:
1479 1480 pass
1480 1481 return tmpl, None
1481 1482 else:
1482 1483 style = util.expandpath(ui.config('ui', 'style', ''))
1483 1484
1484 1485 if not tmpl and style:
1485 1486 mapfile = style
1486 1487 if not os.path.split(mapfile)[0]:
1487 1488 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1488 1489 or templater.templatepath(mapfile))
1489 1490 if mapname:
1490 1491 mapfile = mapname
1491 1492 return None, mapfile
1492 1493
1493 1494 if not tmpl:
1494 1495 return None, None
1495 1496
1496 # looks like a literal template?
1497 if '{' in tmpl:
1498 return tmpl, None
1499
1500 # perhaps a stock style?
1501 if not os.path.split(tmpl)[0]:
1502 mapname = (templater.templatepath('map-cmdline.' + tmpl)
1503 or templater.templatepath(tmpl))
1504 if mapname and os.path.isfile(mapname):
1505 return None, mapname
1506
1507 # perhaps it's a reference to [templates]
1508 t = ui.config('templates', tmpl)
1509 if t:
1510 try:
1511 tmpl = templater.unquotestring(t)
1512 except SyntaxError:
1513 tmpl = t
1514 return tmpl, None
1515
1516 if tmpl == 'list':
1517 ui.write(_("available styles: %s\n") % templater.stylelist())
1518 raise util.Abort(_("specify a template"))
1519
1520 # perhaps it's a path to a map or a template
1521 if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
1522 # is it a mapfile for a style?
1523 if os.path.basename(tmpl).startswith("map-"):
1524 return None, os.path.realpath(tmpl)
1525 tmpl = open(tmpl).read()
1526 return tmpl, None
1527
1528 # constant string?
1529 return tmpl, None
1497 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1530 1498
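The resolution logic removed above (literal '{...}' templates, stock map-cmdline.* styles, [templates] entries, and paths to map files) now lives in formatter.lookuptemplate(); a hedged sketch of the behaviour gettemplate() continues to expose, with illustrative values:

# gettemplate(ui, '{rev}:{node|short}\n', None)  -> ('{rev}:{node|short}\n', None)
# gettemplate(ui, 'compact', None)               -> (None, '.../map-cmdline.compact')
# gettemplate(ui, None, 'xml')                   -> (None, '.../map-cmdline.xml')
# gettemplate(ui, None, None)                    -> (None, None) unless ui.logtemplate
#                                                   or ui.style is configured
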
1531 1499 def show_changeset(ui, repo, opts, buffered=False):
1532 1500 """show one changeset using template or regular display.
1533 1501
1534 1502 Display format will be the first non-empty hit of:
1535 1503 1. option 'template'
1536 1504 2. option 'style'
1537 1505 3. [ui] setting 'logtemplate'
1538 1506 4. [ui] setting 'style'
1539 1507 If all of these values are either unset or the empty string,
1540 1508 regular display via changeset_printer() is done.
1541 1509 """
1542 1510 # options
1543 1511 matchfn = None
1544 1512 if opts.get('patch') or opts.get('stat'):
1545 1513 matchfn = scmutil.matchall(repo)
1546 1514
1547 1515 if opts.get('template') == 'json':
1548 1516 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1549 1517
1550 1518 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1551 1519
1552 1520 if not tmpl and not mapfile:
1553 1521 return changeset_printer(ui, repo, matchfn, opts, buffered)
1554 1522
1555 1523 try:
1556 1524 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1557 1525 buffered)
1558 1526 except SyntaxError, inst:
1559 1527 raise util.Abort(inst.args[0])
1560 1528 return t
1561 1529
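A hedged sketch of the typical displayer lifecycle built on show_changeset(); show() and close() are the changeset_printer methods defined above:

# sketch only
# displayer = show_changeset(ui, repo, {'template': '{rev}:{node|short}\n'})
# displayer.show(repo['tip'])
# displayer.close()
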
1562 1530 def showmarker(ui, marker):
1563 1531 """utility function to display obsolescence marker in a readable way
1564 1532
1565 1533 To be used by debug function."""
1566 1534 ui.write(hex(marker.precnode()))
1567 1535 for repl in marker.succnodes():
1568 1536 ui.write(' ')
1569 1537 ui.write(hex(repl))
1570 1538 ui.write(' %X ' % marker.flags())
1571 1539 parents = marker.parentnodes()
1572 1540 if parents is not None:
1573 1541 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1574 1542 ui.write('(%s) ' % util.datestr(marker.date()))
1575 1543 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1576 1544 sorted(marker.metadata().items())
1577 1545 if t[0] != 'date')))
1578 1546 ui.write('\n')
1579 1547
1580 1548 def finddate(ui, repo, date):
1581 1549 """Find the tipmost changeset that matches the given date spec"""
1582 1550
1583 1551 df = util.matchdate(date)
1584 1552 m = scmutil.matchall(repo)
1585 1553 results = {}
1586 1554
1587 1555 def prep(ctx, fns):
1588 1556 d = ctx.date()
1589 1557 if df(d[0]):
1590 1558 results[ctx.rev()] = d
1591 1559
1592 1560 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1593 1561 rev = ctx.rev()
1594 1562 if rev in results:
1595 1563 ui.status(_("found revision %s from %s\n") %
1596 1564 (rev, util.datestr(results[rev])))
1597 1565 return str(rev)
1598 1566
1599 1567 raise util.Abort(_("revision matching date not found"))
1600 1568
1601 1569 def increasingwindows(windowsize=8, sizelimit=512):
1602 1570 while True:
1603 1571 yield windowsize
1604 1572 if windowsize < sizelimit:
1605 1573 windowsize *= 2
1606 1574
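Illustrative behaviour of the window generator above: sizes double until they reach the limit and then stay there.

from itertools import islice   # illustration only
assert list(islice(increasingwindows(), 8)) == [8, 16, 32, 64, 128, 256, 512, 512]
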
1607 1575 class FileWalkError(Exception):
1608 1576 pass
1609 1577
1610 1578 def walkfilerevs(repo, match, follow, revs, fncache):
1611 1579 '''Walks the file history for the matched files.
1612 1580
1613 1581 Returns the changeset revs that are involved in the file history.
1614 1582
1615 1583 Throws FileWalkError if the file history can't be walked using
1616 1584 filelogs alone.
1617 1585 '''
1618 1586 wanted = set()
1619 1587 copies = []
1620 1588 minrev, maxrev = min(revs), max(revs)
1621 1589 def filerevgen(filelog, last):
1622 1590 """
1623 1591 Only files, no patterns. Check the history of each file.
1624 1592
1625 1593 Examines filelog entries within the minrev..maxrev linkrev range.
1626 1594 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1627 1595 tuples in reverse order.
1628 1596 """
1629 1597 cl_count = len(repo)
1630 1598 revs = []
1631 1599 for j in xrange(0, last + 1):
1632 1600 linkrev = filelog.linkrev(j)
1633 1601 if linkrev < minrev:
1634 1602 continue
1635 1603 # only yield rev for which we have the changelog, it can
1636 1604 # happen while doing "hg log" during a pull or commit
1637 1605 if linkrev >= cl_count:
1638 1606 break
1639 1607
1640 1608 parentlinkrevs = []
1641 1609 for p in filelog.parentrevs(j):
1642 1610 if p != nullrev:
1643 1611 parentlinkrevs.append(filelog.linkrev(p))
1644 1612 n = filelog.node(j)
1645 1613 revs.append((linkrev, parentlinkrevs,
1646 1614 follow and filelog.renamed(n)))
1647 1615
1648 1616 return reversed(revs)
1649 1617 def iterfiles():
1650 1618 pctx = repo['.']
1651 1619 for filename in match.files():
1652 1620 if follow:
1653 1621 if filename not in pctx:
1654 1622 raise util.Abort(_('cannot follow file not in parent '
1655 1623 'revision: "%s"') % filename)
1656 1624 yield filename, pctx[filename].filenode()
1657 1625 else:
1658 1626 yield filename, None
1659 1627 for filename_node in copies:
1660 1628 yield filename_node
1661 1629
1662 1630 for file_, node in iterfiles():
1663 1631 filelog = repo.file(file_)
1664 1632 if not len(filelog):
1665 1633 if node is None:
1666 1634 # A zero count may be a directory or deleted file, so
1667 1635 # try to find matching entries on the slow path.
1668 1636 if follow:
1669 1637 raise util.Abort(
1670 1638 _('cannot follow nonexistent file: "%s"') % file_)
1671 1639 raise FileWalkError("Cannot walk via filelog")
1672 1640 else:
1673 1641 continue
1674 1642
1675 1643 if node is None:
1676 1644 last = len(filelog) - 1
1677 1645 else:
1678 1646 last = filelog.rev(node)
1679 1647
1680 1648 # keep track of all ancestors of the file
1681 1649 ancestors = set([filelog.linkrev(last)])
1682 1650
1683 1651 # iterate from latest to oldest revision
1684 1652 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1685 1653 if not follow:
1686 1654 if rev > maxrev:
1687 1655 continue
1688 1656 else:
1689 1657 # Note that last might not be the first interesting
1690 1658 # rev to us:
1691 1659 # if the file has been changed after maxrev, we'll
1692 1660 # have linkrev(last) > maxrev, and we still need
1693 1661 # to explore the file graph
1694 1662 if rev not in ancestors:
1695 1663 continue
1696 1664 # XXX insert 1327 fix here
1697 1665 if flparentlinkrevs:
1698 1666 ancestors.update(flparentlinkrevs)
1699 1667
1700 1668 fncache.setdefault(rev, []).append(file_)
1701 1669 wanted.add(rev)
1702 1670 if copied:
1703 1671 copies.append(copied)
1704 1672
1705 1673 return wanted
1706 1674
1707 1675 class _followfilter(object):
1708 1676 def __init__(self, repo, onlyfirst=False):
1709 1677 self.repo = repo
1710 1678 self.startrev = nullrev
1711 1679 self.roots = set()
1712 1680 self.onlyfirst = onlyfirst
1713 1681
1714 1682 def match(self, rev):
1715 1683 def realparents(rev):
1716 1684 if self.onlyfirst:
1717 1685 return self.repo.changelog.parentrevs(rev)[0:1]
1718 1686 else:
1719 1687 return filter(lambda x: x != nullrev,
1720 1688 self.repo.changelog.parentrevs(rev))
1721 1689
1722 1690 if self.startrev == nullrev:
1723 1691 self.startrev = rev
1724 1692 return True
1725 1693
1726 1694 if rev > self.startrev:
1727 1695 # forward: all descendants
1728 1696 if not self.roots:
1729 1697 self.roots.add(self.startrev)
1730 1698 for parent in realparents(rev):
1731 1699 if parent in self.roots:
1732 1700 self.roots.add(rev)
1733 1701 return True
1734 1702 else:
1735 1703 # backwards: all parents
1736 1704 if not self.roots:
1737 1705 self.roots.update(realparents(self.startrev))
1738 1706 if rev in self.roots:
1739 1707 self.roots.remove(rev)
1740 1708 self.roots.update(realparents(rev))
1741 1709 return True
1742 1710
1743 1711 return False
1744 1712
1745 1713 def walkchangerevs(repo, match, opts, prepare):
1746 1714 '''Iterate over files and the revs in which they changed.
1747 1715
1748 1716 Callers most commonly need to iterate backwards over the history
1749 1717 in which they are interested. Doing so has awful (quadratic-looking)
1750 1718 performance, so we use iterators in a "windowed" way.
1751 1719
1752 1720 We walk a window of revisions in the desired order. Within the
1753 1721 window, we first walk forwards to gather data, then in the desired
1754 1722 order (usually backwards) to display it.
1755 1723
1756 1724 This function returns an iterator yielding contexts. Before
1757 1725 yielding each context, the iterator will first call the prepare
1758 1726 function on each context in the window in forward order.'''
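# Editor's sketch (hedged; mirrors the finddate() caller above rather than
# adding any new API):
#
#   def prep(ctx, fns):
#       ...                      # gather data, called in forward order
#   for ctx in walkchangerevs(repo, match, {'rev': None}, prep):
#       ...                      # consume contexts in the requested order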
1759 1727
1760 1728 follow = opts.get('follow') or opts.get('follow_first')
1761 1729 revs = _logrevs(repo, opts)
1762 1730 if not revs:
1763 1731 return []
1764 1732 wanted = set()
1765 1733 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1766 1734 opts.get('removed'))
1767 1735 fncache = {}
1768 1736 change = repo.changectx
1769 1737
1770 1738 # First step is to fill wanted, the set of revisions that we want to yield.
1771 1739 # When it does not induce extra cost, we also fill fncache for revisions in
1772 1740 # wanted: a cache of filenames that were changed (ctx.files()) and that
1773 1741 # match the file filtering conditions.
1774 1742
1775 1743 if match.always():
1776 1744 # No files, no patterns. Display all revs.
1777 1745 wanted = revs
1778 1746 elif not slowpath:
1779 1747 # We only have to read through the filelog to find wanted revisions
1780 1748
1781 1749 try:
1782 1750 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1783 1751 except FileWalkError:
1784 1752 slowpath = True
1785 1753
1786 1754 # We decided to fall back to the slowpath because at least one
1787 1755 # of the paths was not a file. Check to see if at least one of them
1788 1756 # existed in history, otherwise simply return
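# Editor's note: the for/else construct below relies on Python's loop-else
# clause -- the early 'return []' is taken only when no given path is '.'
# and none has ever existed in the store.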
1789 1757 for path in match.files():
1790 1758 if path == '.' or path in repo.store:
1791 1759 break
1792 1760 else:
1793 1761 return []
1794 1762
1795 1763 if slowpath:
1796 1764 # We have to read the changelog to match filenames against
1797 1765 # changed files
1798 1766
1799 1767 if follow:
1800 1768 raise util.Abort(_('can only follow copies/renames for explicit '
1801 1769 'filenames'))
1802 1770
1803 1771 # The slow path checks files modified in every changeset.
1804 1772 # This is really slow on large repos, so compute the set lazily.
1805 1773 class lazywantedset(object):
1806 1774 def __init__(self):
1807 1775 self.set = set()
1808 1776 self.revs = set(revs)
1809 1777
1810 1778 # No need to worry about locality here because it will be accessed
1811 1779 # in the same order as the increasing window below.
1812 1780 def __contains__(self, value):
1813 1781 if value in self.set:
1814 1782 return True
1815 1783 elif not value in self.revs:
1816 1784 return False
1817 1785 else:
1818 1786 self.revs.discard(value)
1819 1787 ctx = change(value)
1820 1788 matches = filter(match, ctx.files())
1821 1789 if matches:
1822 1790 fncache[value] = matches
1823 1791 self.set.add(value)
1824 1792 return True
1825 1793 return False
1826 1794
1827 1795 def discard(self, value):
1828 1796 self.revs.discard(value)
1829 1797 self.set.discard(value)
1830 1798
1831 1799 wanted = lazywantedset()
1832 1800
1833 1801 # it might be worthwhile to do this in the iterator if the rev range
1834 1802 # is descending and the prune args are all within that range
1835 1803 for rev in opts.get('prune', ()):
1836 1804 rev = repo[rev].rev()
1837 1805 ff = _followfilter(repo)
1838 1806 stop = min(revs[0], revs[-1])
1839 1807 for x in xrange(rev, stop - 1, -1):
1840 1808 if ff.match(x):
1841 1809 wanted = wanted - [x]
1842 1810
1843 1811 # Now that wanted is correctly initialized, we can iterate over the
1844 1812 # revision range, yielding only revisions in wanted.
1845 1813 def iterate():
1846 1814 if follow and match.always():
1847 1815 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1848 1816 def want(rev):
1849 1817 return ff.match(rev) and rev in wanted
1850 1818 else:
1851 1819 def want(rev):
1852 1820 return rev in wanted
1853 1821
1854 1822 it = iter(revs)
1855 1823 stopiteration = False
1856 1824 for windowsize in increasingwindows():
1857 1825 nrevs = []
1858 1826 for i in xrange(windowsize):
1859 1827 rev = next(it, None)
1860 1828 if rev is None:
1861 1829 stopiteration = True
1862 1830 break
1863 1831 elif want(rev):
1864 1832 nrevs.append(rev)
1865 1833 for rev in sorted(nrevs):
1866 1834 fns = fncache.get(rev)
1867 1835 ctx = change(rev)
1868 1836 if not fns:
1869 1837 def fns_generator():
1870 1838 for f in ctx.files():
1871 1839 if match(f):
1872 1840 yield f
1873 1841 fns = fns_generator()
1874 1842 prepare(ctx, fns)
1875 1843 for rev in nrevs:
1876 1844 yield change(rev)
1877 1845
1878 1846 if stopiteration:
1879 1847 break
1880 1848
1881 1849 return iterate()
1882 1850
1883 1851 def _makefollowlogfilematcher(repo, files, followfirst):
1884 1852 # When displaying a revision with --patch --follow FILE, we have
1885 1853 # to know which file of the revision must be diffed. With
1886 1854 # --follow, we want the names of the ancestors of FILE in the
1887 1855 # revision, stored in "fcache". "fcache" is populated by
1888 1856 # reproducing the graph traversal already done by --follow revset
1889 1857 # and relating linkrevs to file names (which is not "correct" but
1890 1858 # good enough).
1891 1859 fcache = {}
1892 1860 fcacheready = [False]
1893 1861 pctx = repo['.']
1894 1862
1895 1863 def populate():
1896 1864 for fn in files:
1897 1865 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1898 1866 for c in i:
1899 1867 fcache.setdefault(c.linkrev(), set()).add(c.path())
1900 1868
1901 1869 def filematcher(rev):
1902 1870 if not fcacheready[0]:
1903 1871 # Lazy initialization
1904 1872 fcacheready[0] = True
1905 1873 populate()
1906 1874 return scmutil.matchfiles(repo, fcache.get(rev, []))
1907 1875
1908 1876 return filematcher
1909 1877
1910 1878 def _makenofollowlogfilematcher(repo, pats, opts):
1911 1879 '''hook for extensions to override the filematcher for non-follow cases'''
1912 1880 return None
1913 1881
1914 1882 def _makelogrevset(repo, pats, opts, revs):
1915 1883 """Return (expr, filematcher) where expr is a revset string built
1916 1884 from log options and file patterns or None. If --stat or --patch
1917 1885 are not passed filematcher is None. Otherwise it is a callable
1918 1886 taking a revision number and returning a match object filtering
1919 1887 the files to be detailed when displaying the revision.
1920 1888 """
1921 1889 opt2revset = {
1922 1890 'no_merges': ('not merge()', None),
1923 1891 'only_merges': ('merge()', None),
1924 1892 '_ancestors': ('ancestors(%(val)s)', None),
1925 1893 '_fancestors': ('_firstancestors(%(val)s)', None),
1926 1894 '_descendants': ('descendants(%(val)s)', None),
1927 1895 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1928 1896 '_matchfiles': ('_matchfiles(%(val)s)', None),
1929 1897 'date': ('date(%(val)r)', None),
1930 1898 'branch': ('branch(%(val)r)', ' or '),
1931 1899 '_patslog': ('filelog(%(val)r)', ' or '),
1932 1900 '_patsfollow': ('follow(%(val)r)', ' or '),
1933 1901 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1934 1902 'keyword': ('keyword(%(val)r)', ' or '),
1935 1903 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1936 1904 'user': ('user(%(val)r)', ' or '),
1937 1905 }
1938 1906
1939 1907 opts = dict(opts)
1940 1908 # follow or not follow?
1941 1909 follow = opts.get('follow') or opts.get('follow_first')
1942 1910 if opts.get('follow_first'):
1943 1911 followfirst = 1
1944 1912 else:
1945 1913 followfirst = 0
1946 1914 # --follow with FILE behaviour depends on revs...
1947 1915 it = iter(revs)
1948 1916 startrev = it.next()
1949 1917 followdescendants = startrev < next(it, startrev)
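# Editor's note: when a second revision exists and is larger than the first,
# the requested range is ascending, so --follow should walk descendants of
# startrev instead of its ancestors (see the fnopats selection below).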
1950 1918
1951 1919 # branch and only_branch are really aliases and must be handled at
1952 1920 # the same time
1953 1921 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1954 1922 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1955 1923 # pats/include/exclude are passed to match.match() directly in
1956 1924 # _matchfiles() revset but walkchangerevs() builds its matcher with
1957 1925 # scmutil.match(). The difference is input pats are globbed on
1958 1926 # platforms without shell expansion (windows).
1959 1927 wctx = repo[None]
1960 1928 match, pats = scmutil.matchandpats(wctx, pats, opts)
1961 1929 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1962 1930 opts.get('removed'))
1963 1931 if not slowpath:
1964 1932 for f in match.files():
1965 1933 if follow and f not in wctx:
1966 1934 # If the file exists, it may be a directory, so let it
1967 1935 # take the slow path.
1968 1936 if os.path.exists(repo.wjoin(f)):
1969 1937 slowpath = True
1970 1938 continue
1971 1939 else:
1972 1940 raise util.Abort(_('cannot follow file not in parent '
1973 1941 'revision: "%s"') % f)
1974 1942 filelog = repo.file(f)
1975 1943 if not filelog:
1976 1944 # A zero count may be a directory or deleted file, so
1977 1945 # try to find matching entries on the slow path.
1978 1946 if follow:
1979 1947 raise util.Abort(
1980 1948 _('cannot follow nonexistent file: "%s"') % f)
1981 1949 slowpath = True
1982 1950
1983 1951 # We decided to fall back to the slowpath because at least one
1984 1952 # of the paths was not a file. Check to see if at least one of them
1985 1953 # existed in history - in that case, we'll continue down the
1986 1954 # slowpath; otherwise, we can turn off the slowpath
1987 1955 if slowpath:
1988 1956 for path in match.files():
1989 1957 if path == '.' or path in repo.store:
1990 1958 break
1991 1959 else:
1992 1960 slowpath = False
1993 1961
1994 1962 fpats = ('_patsfollow', '_patsfollowfirst')
1995 1963 fnopats = (('_ancestors', '_fancestors'),
1996 1964 ('_descendants', '_fdescendants'))
1997 1965 if slowpath:
1998 1966 # See walkchangerevs() slow path.
1999 1967 #
2000 1968 # pats/include/exclude cannot be represented as separate
2001 1969 # revset expressions as their filtering logic applies at file
2002 1970 # level. For instance "-I a -X a" matches a revision touching
2003 1971 # "a" and "b" while "file(a) and not file(b)" does
2004 1972 # not. Besides, filesets are evaluated against the working
2005 1973 # directory.
2006 1974 matchargs = ['r:', 'd:relpath']
2007 1975 for p in pats:
2008 1976 matchargs.append('p:' + p)
2009 1977 for p in opts.get('include', []):
2010 1978 matchargs.append('i:' + p)
2011 1979 for p in opts.get('exclude', []):
2012 1980 matchargs.append('x:' + p)
2013 1981 matchargs = ','.join(('%r' % p) for p in matchargs)
2014 1982 opts['_matchfiles'] = matchargs
2015 1983 if follow:
2016 1984 opts[fnopats[0][followfirst]] = '.'
2017 1985 else:
2018 1986 if follow:
2019 1987 if pats:
2020 1988 # follow() revset interprets its file argument as a
2021 1989 # manifest entry, so use match.files(), not pats.
2022 1990 opts[fpats[followfirst]] = list(match.files())
2023 1991 else:
2024 1992 op = fnopats[followdescendants][followfirst]
2025 1993 opts[op] = 'rev(%d)' % startrev
2026 1994 else:
2027 1995 opts['_patslog'] = list(pats)
2028 1996
2029 1997 filematcher = None
2030 1998 if opts.get('patch') or opts.get('stat'):
2031 1999 # When following files, track renames via a special matcher.
2032 2000 # If we're forced to take the slowpath it means we're following
2033 2001 # at least one pattern/directory, so don't bother with rename tracking.
2034 2002 if follow and not match.always() and not slowpath:
2035 2003 # _makefollowlogfilematcher expects its files argument to be
2036 2004 # relative to the repo root, so use match.files(), not pats.
2037 2005 filematcher = _makefollowlogfilematcher(repo, match.files(),
2038 2006 followfirst)
2039 2007 else:
2040 2008 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2041 2009 if filematcher is None:
2042 2010 filematcher = lambda rev: match
2043 2011
2044 2012 expr = []
2045 2013 for op, val in sorted(opts.iteritems()):
2046 2014 if not val:
2047 2015 continue
2048 2016 if op not in opt2revset:
2049 2017 continue
2050 2018 revop, andor = opt2revset[op]
2051 2019 if '%(val)' not in revop:
2052 2020 expr.append(revop)
2053 2021 else:
2054 2022 if not isinstance(val, list):
2055 2023 e = revop % {'val': val}
2056 2024 else:
2057 2025 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2058 2026 expr.append(e)
2059 2027
2060 2028 if expr:
2061 2029 expr = '(' + ' and '.join(expr) + ')'
2062 2030 else:
2063 2031 expr = None
2064 2032 return expr, filematcher
2065 2033
2066 2034 def _logrevs(repo, opts):
2067 2035 # Default --rev value depends on --follow but --follow behaviour
2068 2036 # depends on revisions resolved from --rev...
2069 2037 follow = opts.get('follow') or opts.get('follow_first')
2070 2038 if opts.get('rev'):
2071 2039 revs = scmutil.revrange(repo, opts['rev'])
2072 2040 elif follow and repo.dirstate.p1() == nullid:
2073 2041 revs = revset.baseset()
2074 2042 elif follow:
2075 2043 revs = repo.revs('reverse(:.)')
2076 2044 else:
2077 2045 revs = revset.spanset(repo)
2078 2046 revs.reverse()
2079 2047 return revs
2080 2048
2081 2049 def getgraphlogrevs(repo, pats, opts):
2082 2050 """Return (revs, expr, filematcher) where revs is an iterable of
2083 2051 revision numbers, expr is a revset string built from log options
2084 2052 and file patterns or None, and used to filter 'revs'. If --stat or
2085 2053 --patch are not passed filematcher is None. Otherwise it is a
2086 2054 callable taking a revision number and returning a match object
2087 2055 filtering the files to be detailed when displaying the revision.
2088 2056 """
2089 2057 limit = loglimit(opts)
2090 2058 revs = _logrevs(repo, opts)
2091 2059 if not revs:
2092 2060 return revset.baseset(), None, None
2093 2061 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2094 2062 if opts.get('rev'):
2095 2063 # User-specified revs might be unsorted, but don't sort before
2096 2064 # _makelogrevset because it might depend on the order of revs
2097 2065 revs.sort(reverse=True)
2098 2066 if expr:
2099 2067 # Revset matchers often operate faster on revisions in changelog
2100 2068 # order, because most filters deal with the changelog.
2101 2069 revs.reverse()
2102 2070 matcher = revset.match(repo.ui, expr)
2103 2071 # Revset matches can reorder revisions. "A or B" typically returns
2104 2072 # returns the revision matching A then the revision matching B. Sort
2105 2073 # again to fix that.
2106 2074 revs = matcher(repo, revs)
2107 2075 revs.sort(reverse=True)
2108 2076 if limit is not None:
2109 2077 limitedrevs = []
2110 2078 for idx, rev in enumerate(revs):
2111 2079 if idx >= limit:
2112 2080 break
2113 2081 limitedrevs.append(rev)
2114 2082 revs = revset.baseset(limitedrevs)
2115 2083
2116 2084 return revs, expr, filematcher
2117 2085
2118 2086 def getlogrevs(repo, pats, opts):
2119 2087 """Return (revs, expr, filematcher) where revs is an iterable of
2120 2088 revision numbers, expr is a revset string built from log options
2121 2089 and file patterns or None, and used to filter 'revs'. If --stat or
2122 2090 --patch are not passed filematcher is None. Otherwise it is a
2123 2091 callable taking a revision number and returning a match object
2124 2092 filtering the files to be detailed when displaying the revision.
2125 2093 """
2126 2094 limit = loglimit(opts)
2127 2095 revs = _logrevs(repo, opts)
2128 2096 if not revs:
2129 2097 return revset.baseset([]), None, None
2130 2098 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2131 2099 if expr:
2132 2100 # Revset matchers often operate faster on revisions in changelog
2133 2101 # order, because most filters deal with the changelog.
2134 2102 if not opts.get('rev'):
2135 2103 revs.reverse()
2136 2104 matcher = revset.match(repo.ui, expr)
2137 2105 # Revset matches can reorder revisions. "A or B" typically returns
2138 2106 # returns the revision matching A then the revision matching B. Sort
2139 2107 # again to fix that.
2140 2108 revs = matcher(repo, revs)
2141 2109 if not opts.get('rev'):
2142 2110 revs.sort(reverse=True)
2143 2111 if limit is not None:
2144 2112 limitedrevs = []
2145 2113 for idx, r in enumerate(revs):
2146 2114 if limit <= idx:
2147 2115 break
2148 2116 limitedrevs.append(r)
2149 2117 revs = revset.baseset(limitedrevs)
2150 2118
2151 2119 return revs, expr, filematcher
2152 2120
2153 2121 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
2154 2122 filematcher=None):
2155 2123 seen, state = [], graphmod.asciistate()
2156 2124 for rev, type, ctx, parents in dag:
2157 2125 char = 'o'
2158 2126 if ctx.node() in showparents:
2159 2127 char = '@'
2160 2128 elif ctx.obsolete():
2161 2129 char = 'x'
2162 2130 elif ctx.closesbranch():
2163 2131 char = '_'
2164 2132 copies = None
2165 2133 if getrenamed and ctx.rev():
2166 2134 copies = []
2167 2135 for fn in ctx.files():
2168 2136 rename = getrenamed(fn, ctx.rev())
2169 2137 if rename:
2170 2138 copies.append((fn, rename[0]))
2171 2139 revmatchfn = None
2172 2140 if filematcher is not None:
2173 2141 revmatchfn = filematcher(ctx.rev())
2174 2142 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2175 2143 lines = displayer.hunk.pop(rev).split('\n')
2176 2144 if not lines[-1]:
2177 2145 del lines[-1]
2178 2146 displayer.flush(rev)
2179 2147 edges = edgefn(type, char, lines, seen, rev, parents)
2180 2148 for type, char, lines, coldata in edges:
2181 2149 graphmod.ascii(ui, state, type, char, lines, coldata)
2182 2150 displayer.close()
2183 2151
2184 2152 def graphlog(ui, repo, *pats, **opts):
2185 2153 # Parameters are identical to log command ones
2186 2154 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2187 2155 revdag = graphmod.dagwalker(repo, revs)
2188 2156
2189 2157 getrenamed = None
2190 2158 if opts.get('copies'):
2191 2159 endrev = None
2192 2160 if opts.get('rev'):
2193 2161 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2194 2162 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2195 2163 displayer = show_changeset(ui, repo, opts, buffered=True)
2196 2164 showparents = [ctx.node() for ctx in repo[None].parents()]
2197 2165 displaygraph(ui, revdag, displayer, showparents,
2198 2166 graphmod.asciiedges, getrenamed, filematcher)
2199 2167
2200 2168 def checkunsupportedgraphflags(pats, opts):
2201 2169 for op in ["newest_first"]:
2202 2170 if op in opts and opts[op]:
2203 2171 raise util.Abort(_("-G/--graph option is incompatible with --%s")
2204 2172 % op.replace("_", "-"))
2205 2173
2206 2174 def graphrevs(repo, nodes, opts):
2207 2175 limit = loglimit(opts)
2208 2176 nodes.reverse()
2209 2177 if limit is not None:
2210 2178 nodes = nodes[:limit]
2211 2179 return graphmod.nodes(repo, nodes)
2212 2180
2213 2181 def add(ui, repo, match, prefix, explicitonly, **opts):
2214 2182 join = lambda f: os.path.join(prefix, f)
2215 2183 bad = []
2216 2184
2217 2185 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2218 2186 names = []
2219 2187 wctx = repo[None]
2220 2188 cca = None
2221 2189 abort, warn = scmutil.checkportabilityalert(ui)
2222 2190 if abort or warn:
2223 2191 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2224 2192
2225 2193 for f in wctx.walk(matchmod.badmatch(match, badfn)):
2226 2194 exact = match.exact(f)
2227 2195 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2228 2196 if cca:
2229 2197 cca(f)
2230 2198 names.append(f)
2231 2199 if ui.verbose or not exact:
2232 2200 ui.status(_('adding %s\n') % match.rel(f))
2233 2201
2234 2202 for subpath in sorted(wctx.substate):
2235 2203 sub = wctx.sub(subpath)
2236 2204 try:
2237 2205 submatch = matchmod.narrowmatcher(subpath, match)
2238 2206 if opts.get('subrepos'):
2239 2207 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2240 2208 else:
2241 2209 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2242 2210 except error.LookupError:
2243 2211 ui.status(_("skipping missing subrepository: %s\n")
2244 2212 % join(subpath))
2245 2213
2246 2214 if not opts.get('dry_run'):
2247 2215 rejected = wctx.add(names, prefix)
2248 2216 bad.extend(f for f in rejected if f in match.files())
2249 2217 return bad
2250 2218
2251 2219 def forget(ui, repo, match, prefix, explicitonly):
2252 2220 join = lambda f: os.path.join(prefix, f)
2253 2221 bad = []
2254 2222 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2255 2223 wctx = repo[None]
2256 2224 forgot = []
2257 2225
2258 2226 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2259 2227 forget = sorted(s[0] + s[1] + s[3] + s[6])
2260 2228 if explicitonly:
2261 2229 forget = [f for f in forget if match.exact(f)]
2262 2230
2263 2231 for subpath in sorted(wctx.substate):
2264 2232 sub = wctx.sub(subpath)
2265 2233 try:
2266 2234 submatch = matchmod.narrowmatcher(subpath, match)
2267 2235 subbad, subforgot = sub.forget(submatch, prefix)
2268 2236 bad.extend([subpath + '/' + f for f in subbad])
2269 2237 forgot.extend([subpath + '/' + f for f in subforgot])
2270 2238 except error.LookupError:
2271 2239 ui.status(_("skipping missing subrepository: %s\n")
2272 2240 % join(subpath))
2273 2241
2274 2242 if not explicitonly:
2275 2243 for f in match.files():
2276 2244 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2277 2245 if f not in forgot:
2278 2246 if repo.wvfs.exists(f):
2279 2247 # Don't complain if the exact case match wasn't given.
2280 2248 # But don't do this until after checking 'forgot', so
2281 2249 # that subrepo files aren't normalized, and this op is
2282 2250 # purely from data cached by the status walk above.
2283 2251 if repo.dirstate.normalize(f) in repo.dirstate:
2284 2252 continue
2285 2253 ui.warn(_('not removing %s: '
2286 2254 'file is already untracked\n')
2287 2255 % match.rel(f))
2288 2256 bad.append(f)
2289 2257
2290 2258 for f in forget:
2291 2259 if ui.verbose or not match.exact(f):
2292 2260 ui.status(_('removing %s\n') % match.rel(f))
2293 2261
2294 2262 rejected = wctx.forget(forget, prefix)
2295 2263 bad.extend(f for f in rejected if f in match.files())
2296 2264 forgot.extend(f for f in forget if f not in rejected)
2297 2265 return bad, forgot
2298 2266
2299 2267 def files(ui, ctx, m, fm, fmt, subrepos):
2300 2268 rev = ctx.rev()
2301 2269 ret = 1
2302 2270 ds = ctx.repo().dirstate
2303 2271
2304 2272 for f in ctx.matches(m):
2305 2273 if rev is None and ds[f] == 'r':
2306 2274 continue
2307 2275 fm.startitem()
2308 2276 if ui.verbose:
2309 2277 fc = ctx[f]
2310 2278 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2311 2279 fm.data(abspath=f)
2312 2280 fm.write('path', fmt, m.rel(f))
2313 2281 ret = 0
2314 2282
2315 2283 for subpath in sorted(ctx.substate):
2316 2284 def matchessubrepo(subpath):
2317 2285 return (m.always() or m.exact(subpath)
2318 2286 or any(f.startswith(subpath + '/') for f in m.files()))
2319 2287
2320 2288 if subrepos or matchessubrepo(subpath):
2321 2289 sub = ctx.sub(subpath)
2322 2290 try:
2323 2291 submatch = matchmod.narrowmatcher(subpath, m)
2324 2292 if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
2325 2293 ret = 0
2326 2294 except error.LookupError:
2327 2295 ui.status(_("skipping missing subrepository: %s\n")
2328 2296 % m.abs(subpath))
2329 2297
2330 2298 return ret
2331 2299
2332 2300 def remove(ui, repo, m, prefix, after, force, subrepos):
2333 2301 join = lambda f: os.path.join(prefix, f)
2334 2302 ret = 0
2335 2303 s = repo.status(match=m, clean=True)
2336 2304 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2337 2305
2338 2306 wctx = repo[None]
2339 2307
2340 2308 for subpath in sorted(wctx.substate):
2341 2309 def matchessubrepo(matcher, subpath):
2342 2310 if matcher.exact(subpath):
2343 2311 return True
2344 2312 for f in matcher.files():
2345 2313 if f.startswith(subpath):
2346 2314 return True
2347 2315 return False
2348 2316
2349 2317 if subrepos or matchessubrepo(m, subpath):
2350 2318 sub = wctx.sub(subpath)
2351 2319 try:
2352 2320 submatch = matchmod.narrowmatcher(subpath, m)
2353 2321 if sub.removefiles(submatch, prefix, after, force, subrepos):
2354 2322 ret = 1
2355 2323 except error.LookupError:
2356 2324 ui.status(_("skipping missing subrepository: %s\n")
2357 2325 % join(subpath))
2358 2326
2359 2327 # warn about failure to delete explicit files/dirs
2360 2328 deleteddirs = util.dirs(deleted)
2361 2329 for f in m.files():
2362 2330 def insubrepo():
2363 2331 for subpath in wctx.substate:
2364 2332 if f.startswith(subpath):
2365 2333 return True
2366 2334 return False
2367 2335
2368 2336 isdir = f in deleteddirs or wctx.hasdir(f)
2369 2337 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2370 2338 continue
2371 2339
2372 2340 if repo.wvfs.exists(f):
2373 2341 if repo.wvfs.isdir(f):
2374 2342 ui.warn(_('not removing %s: no tracked files\n')
2375 2343 % m.rel(f))
2376 2344 else:
2377 2345 ui.warn(_('not removing %s: file is untracked\n')
2378 2346 % m.rel(f))
2379 2347 # missing files will generate a warning elsewhere
2380 2348 ret = 1
2381 2349
2382 2350 if force:
2383 2351 list = modified + deleted + clean + added
2384 2352 elif after:
2385 2353 list = deleted
2386 2354 for f in modified + added + clean:
2387 2355 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2388 2356 ret = 1
2389 2357 else:
2390 2358 list = deleted + clean
2391 2359 for f in modified:
2392 2360 ui.warn(_('not removing %s: file is modified (use -f'
2393 2361 ' to force removal)\n') % m.rel(f))
2394 2362 ret = 1
2395 2363 for f in added:
2396 2364 ui.warn(_('not removing %s: file has been marked for add'
2397 2365 ' (use forget to undo)\n') % m.rel(f))
2398 2366 ret = 1
2399 2367
2400 2368 for f in sorted(list):
2401 2369 if ui.verbose or not m.exact(f):
2402 2370 ui.status(_('removing %s\n') % m.rel(f))
2403 2371
2404 2372 wlock = repo.wlock()
2405 2373 try:
2406 2374 if not after:
2407 2375 for f in list:
2408 2376 if f in added:
2409 2377 continue # we never unlink added files on remove
2410 2378 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2411 2379 repo[None].forget(list)
2412 2380 finally:
2413 2381 wlock.release()
2414 2382
2415 2383 return ret
2416 2384
2417 2385 def cat(ui, repo, ctx, matcher, prefix, **opts):
2418 2386 err = 1
2419 2387
2420 2388 def write(path):
2421 2389 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2422 2390 pathname=os.path.join(prefix, path))
2423 2391 data = ctx[path].data()
2424 2392 if opts.get('decode'):
2425 2393 data = repo.wwritedata(path, data)
2426 2394 fp.write(data)
2427 2395 fp.close()
2428 2396
2429 2397 # Automation often uses hg cat on single files, so special case it
2430 2398 # for performance to avoid the cost of parsing the manifest.
2431 2399 if len(matcher.files()) == 1 and not matcher.anypats():
2432 2400 file = matcher.files()[0]
2433 2401 mf = repo.manifest
2434 2402 mfnode = ctx.manifestnode()
2435 2403 if mfnode and mf.find(mfnode, file)[0]:
2436 2404 write(file)
2437 2405 return 0
2438 2406
2439 2407 # Don't warn about "missing" files that are really in subrepos
2440 2408 def badfn(path, msg):
2441 2409 for subpath in ctx.substate:
2442 2410 if path.startswith(subpath):
2443 2411 return
2444 2412 matcher.bad(path, msg)
2445 2413
2446 2414 for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
2447 2415 write(abs)
2448 2416 err = 0
2449 2417
2450 2418 for subpath in sorted(ctx.substate):
2451 2419 sub = ctx.sub(subpath)
2452 2420 try:
2453 2421 submatch = matchmod.narrowmatcher(subpath, matcher)
2454 2422
2455 2423 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2456 2424 **opts):
2457 2425 err = 0
2458 2426 except error.RepoLookupError:
2459 2427 ui.status(_("skipping missing subrepository: %s\n")
2460 2428 % os.path.join(prefix, subpath))
2461 2429
2462 2430 return err
2463 2431
2464 2432 def commit(ui, repo, commitfunc, pats, opts):
2465 2433 '''commit the specified files or all outstanding changes'''
2466 2434 date = opts.get('date')
2467 2435 if date:
2468 2436 opts['date'] = util.parsedate(date)
2469 2437 message = logmessage(ui, opts)
2470 2438 matcher = scmutil.match(repo[None], pats, opts)
2471 2439
2472 2440 # extract addremove carefully -- this function can be called from a command
2473 2441 # that doesn't support addremove
2474 2442 if opts.get('addremove'):
2475 2443 if scmutil.addremove(repo, matcher, "", opts) != 0:
2476 2444 raise util.Abort(
2477 2445 _("failed to mark all new/missing files as added/removed"))
2478 2446
2479 2447 return commitfunc(ui, repo, message, matcher, opts)
2480 2448
2481 2449 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2482 2450 # amend will reuse the existing user if not specified, but the obsolete
2483 2451 # marker creation requires that the current user's name is specified.
2484 2452 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2485 2453 ui.username() # raise exception if username not set
2486 2454
2487 2455 ui.note(_('amending changeset %s\n') % old)
2488 2456 base = old.p1()
2489 2457
2490 2458 wlock = dsguard = lock = newid = None
2491 2459 try:
2492 2460 wlock = repo.wlock()
2493 2461 dsguard = dirstateguard(repo, 'amend')
2494 2462 lock = repo.lock()
2495 2463 tr = repo.transaction('amend')
2496 2464 try:
2497 2465 # See if we got a message from -m or -l, if not, open the editor
2498 2466 # with the message of the changeset to amend
2499 2467 message = logmessage(ui, opts)
2500 2468 # ensure logfile does not conflict with later enforcement of the
2501 2469 # message. potential logfile content has been processed by
2502 2470 # `logmessage` anyway.
2503 2471 opts.pop('logfile')
2504 2472 # First, do a regular commit to record all changes in the working
2505 2473 # directory (if there are any)
2506 2474 ui.callhooks = False
2507 2475 activebookmark = repo._activebookmark
2508 2476 try:
2509 2477 repo._activebookmark = None
2510 2478 opts['message'] = 'temporary amend commit for %s' % old
2511 2479 node = commit(ui, repo, commitfunc, pats, opts)
2512 2480 finally:
2513 2481 repo._activebookmark = activebookmark
2514 2482 ui.callhooks = True
2515 2483 ctx = repo[node]
2516 2484
2517 2485 # Participating changesets:
2518 2486 #
2519 2487 # node/ctx o - new (intermediate) commit that contains changes
2520 2488 # | from working dir to go into amending commit
2521 2489 # | (or a workingctx if there were no changes)
2522 2490 # |
2523 2491 # old o - changeset to amend
2524 2492 # |
2525 2493 # base o - parent of amending changeset
2526 2494
2527 2495 # Update extra dict from amended commit (e.g. to preserve graft
2528 2496 # source)
2529 2497 extra.update(old.extra())
2530 2498
2531 2499 # Also update it from the intermediate commit or from the wctx
2532 2500 extra.update(ctx.extra())
2533 2501
2534 2502 if len(old.parents()) > 1:
2535 2503 # ctx.files() isn't reliable for merges, so fall back to the
2536 2504 # slower repo.status() method
2537 2505 files = set([fn for st in repo.status(base, old)[:3]
2538 2506 for fn in st])
2539 2507 else:
2540 2508 files = set(old.files())
2541 2509
2542 2510 # Second, we use either the commit we just did, or if there were no
2543 2511 # changes the parent of the working directory as the version of the
2544 2512 # files in the final amend commit
2545 2513 if node:
2546 2514 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2547 2515
2548 2516 user = ctx.user()
2549 2517 date = ctx.date()
2550 2518 # Recompute copies (avoid recording a -> b -> a)
2551 2519 copied = copies.pathcopies(base, ctx)
2552 2520 if old.p2():
2553 2521 copied.update(copies.pathcopies(old.p2(), ctx))
2554 2522
2555 2523 # Prune files which were reverted by the updates: if old
2556 2524 # introduced file X and our intermediate commit, node,
2557 2525 # renamed that file, then those two files are the same and
2558 2526 # we can discard X from our list of files. Likewise if X
2559 2527 # was deleted, it's no longer relevant
2560 2528 files.update(ctx.files())
2561 2529
2562 2530 def samefile(f):
2563 2531 if f in ctx.manifest():
2564 2532 a = ctx.filectx(f)
2565 2533 if f in base.manifest():
2566 2534 b = base.filectx(f)
2567 2535 return (not a.cmp(b)
2568 2536 and a.flags() == b.flags())
2569 2537 else:
2570 2538 return False
2571 2539 else:
2572 2540 return f not in base.manifest()
2573 2541 files = [f for f in files if not samefile(f)]
2574 2542
2575 2543 def filectxfn(repo, ctx_, path):
2576 2544 try:
2577 2545 fctx = ctx[path]
2578 2546 flags = fctx.flags()
2579 2547 mctx = context.memfilectx(repo,
2580 2548 fctx.path(), fctx.data(),
2581 2549 islink='l' in flags,
2582 2550 isexec='x' in flags,
2583 2551 copied=copied.get(path))
2584 2552 return mctx
2585 2553 except KeyError:
2586 2554 return None
2587 2555 else:
2588 2556 ui.note(_('copying changeset %s to %s\n') % (old, base))
2589 2557
2590 2558 # Use version of files as in the old cset
2591 2559 def filectxfn(repo, ctx_, path):
2592 2560 try:
2593 2561 return old.filectx(path)
2594 2562 except KeyError:
2595 2563 return None
2596 2564
2597 2565 user = opts.get('user') or old.user()
2598 2566 date = opts.get('date') or old.date()
2599 2567 editform = mergeeditform(old, 'commit.amend')
2600 2568 editor = getcommiteditor(editform=editform, **opts)
2601 2569 if not message:
2602 2570 editor = getcommiteditor(edit=True, editform=editform)
2603 2571 message = old.description()
2604 2572
2605 2573 pureextra = extra.copy()
2606 2574 extra['amend_source'] = old.hex()
2607 2575
2608 2576 new = context.memctx(repo,
2609 2577 parents=[base.node(), old.p2().node()],
2610 2578 text=message,
2611 2579 files=files,
2612 2580 filectxfn=filectxfn,
2613 2581 user=user,
2614 2582 date=date,
2615 2583 extra=extra,
2616 2584 editor=editor)
2617 2585
2618 2586 newdesc = changelog.stripdesc(new.description())
2619 2587 if ((not node)
2620 2588 and newdesc == old.description()
2621 2589 and user == old.user()
2622 2590 and date == old.date()
2623 2591 and pureextra == old.extra()):
2624 2592 # nothing changed. continuing here would create a new node
2625 2593 # anyway because of the amend_source noise.
2626 2594 #
2627 2595 # This is not what we expect from amend.
2628 2596 return old.node()
2629 2597
2630 2598 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2631 2599 try:
2632 2600 if opts.get('secret'):
2633 2601 commitphase = 'secret'
2634 2602 else:
2635 2603 commitphase = old.phase()
2636 2604 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2637 2605 newid = repo.commitctx(new)
2638 2606 finally:
2639 2607 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2640 2608 if newid != old.node():
2641 2609 # Reroute the working copy parent to the new changeset
2642 2610 repo.setparents(newid, nullid)
2643 2611
2644 2612 # Move bookmarks from old parent to amend commit
2645 2613 bms = repo.nodebookmarks(old.node())
2646 2614 if bms:
2647 2615 marks = repo._bookmarks
2648 2616 for bm in bms:
2649 2617 marks[bm] = newid
2650 2618 marks.write()
2651 2619 # commit the whole amend process
2652 2620 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2653 2621 if createmarkers and newid != old.node():
2654 2622 # mark the new changeset as successor of the rewritten one
2655 2623 new = repo[newid]
2656 2624 obs = [(old, (new,))]
2657 2625 if node:
2658 2626 obs.append((ctx, ()))
2659 2627
2660 2628 obsolete.createmarkers(repo, obs)
2661 2629 tr.close()
2662 2630 finally:
2663 2631 tr.release()
2664 2632 dsguard.close()
2665 2633 if not createmarkers and newid != old.node():
2666 2634 # Strip the intermediate commit (if there was one) and the amended
2667 2635 # commit
2668 2636 if node:
2669 2637 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2670 2638 ui.note(_('stripping amended changeset %s\n') % old)
2671 2639 repair.strip(ui, repo, old.node(), topic='amend-backup')
2672 2640 finally:
2673 2641 lockmod.release(lock, dsguard, wlock)
2674 2642 return newid
2675 2643
2676 2644 def commiteditor(repo, ctx, subs, editform=''):
2677 2645 if ctx.description():
2678 2646 return ctx.description()
2679 2647 return commitforceeditor(repo, ctx, subs, editform=editform)
2680 2648
2681 2649 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2682 2650 editform=''):
2683 2651 if not extramsg:
2684 2652 extramsg = _("Leave message empty to abort commit.")
2685 2653
2686 2654 forms = [e for e in editform.split('.') if e]
2687 2655 forms.insert(0, 'changeset')
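# Editor's worked example: for editform 'commit.amend' the loop below
# probes [committemplate] keys from most to least specific --
# 'changeset.commit.amend', then 'changeset.commit', then 'changeset'.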
2688 2656 while forms:
2689 2657 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2690 2658 if tmpl:
2691 2659 committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
2692 2660 break
2693 2661 forms.pop()
2694 2662 else:
2695 2663 committext = buildcommittext(repo, ctx, subs, extramsg)
2696 2664
2697 2665 # run editor in the repository root
2698 2666 olddir = os.getcwd()
2699 2667 os.chdir(repo.root)
2700 2668 text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
2701 2669 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2702 2670 os.chdir(olddir)
2703 2671
2704 2672 if finishdesc:
2705 2673 text = finishdesc(text)
2706 2674 if not text.strip():
2707 2675 raise util.Abort(_("empty commit message"))
2708 2676
2709 2677 return text
2710 2678
2711 2679 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2712 2680 ui = repo.ui
2713 2681 tmpl, mapfile = gettemplate(ui, tmpl, None)
2714 2682
2715 2683 try:
2716 2684 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2717 2685 except SyntaxError, inst:
2718 2686 raise util.Abort(inst.args[0])
2719 2687
2720 2688 for k, v in repo.ui.configitems('committemplate'):
2721 2689 if k != 'changeset':
2722 2690 t.t.cache[k] = v
2723 2691
2724 2692 if not extramsg:
2725 2693 extramsg = '' # ensure that extramsg is string
2726 2694
2727 2695 ui.pushbuffer()
2728 2696 t.show(ctx, extramsg=extramsg)
2729 2697 return ui.popbuffer()
2730 2698
2731 2699 def buildcommittext(repo, ctx, subs, extramsg):
2732 2700 edittext = []
2733 2701 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2734 2702 if ctx.description():
2735 2703 edittext.append(ctx.description())
2736 2704 edittext.append("")
2737 2705 edittext.append("") # Empty line between message and comments.
2738 2706 edittext.append(_("HG: Enter commit message."
2739 2707 " Lines beginning with 'HG:' are removed."))
2740 2708 edittext.append("HG: %s" % extramsg)
2741 2709 edittext.append("HG: --")
2742 2710 edittext.append(_("HG: user: %s") % ctx.user())
2743 2711 if ctx.p2():
2744 2712 edittext.append(_("HG: branch merge"))
2745 2713 if ctx.branch():
2746 2714 edittext.append(_("HG: branch '%s'") % ctx.branch())
2747 2715 if bookmarks.isactivewdirparent(repo):
2748 2716 edittext.append(_("HG: bookmark '%s'") % repo._activebookmark)
2749 2717 edittext.extend([_("HG: subrepo %s") % s for s in subs])
2750 2718 edittext.extend([_("HG: added %s") % f for f in added])
2751 2719 edittext.extend([_("HG: changed %s") % f for f in modified])
2752 2720 edittext.extend([_("HG: removed %s") % f for f in removed])
2753 2721 if not added and not modified and not removed:
2754 2722 edittext.append(_("HG: no files changed"))
2755 2723 edittext.append("")
2756 2724
2757 2725 return "\n".join(edittext)
2758 2726
2759 2727 def commitstatus(repo, node, branch, bheads=None, opts={}):
2760 2728 ctx = repo[node]
2761 2729 parents = ctx.parents()
2762 2730
2763 2731 if (not opts.get('amend') and bheads and node not in bheads and not
2764 2732 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2765 2733 repo.ui.status(_('created new head\n'))
2766 2734 # The message is not printed for initial roots. For the other
2767 2735 # changesets, it is printed in the following situations:
2768 2736 #
2769 2737 # Par column: for the 2 parents with ...
2770 2738 # N: null or no parent
2771 2739 # B: parent is on another named branch
2772 2740 # C: parent is a regular non head changeset
2773 2741 # H: parent was a branch head of the current branch
2774 2742 # Msg column: whether we print "created new head" message
2775 2743 # In the following, it is assumed that there already exists some
2776 2744 # initial branch heads of the current branch, otherwise nothing is
2777 2745 # printed anyway.
2778 2746 #
2779 2747 # Par Msg Comment
2780 2748 # N N y additional topo root
2781 2749 #
2782 2750 # B N y additional branch root
2783 2751 # C N y additional topo head
2784 2752 # H N n usual case
2785 2753 #
2786 2754 # B B y weird additional branch root
2787 2755 # C B y branch merge
2788 2756 # H B n merge with named branch
2789 2757 #
2790 2758 # C C y additional head from merge
2791 2759 # C H n merge with a head
2792 2760 #
2793 2761 # H H n head merge: head count decreases
2794 2762
2795 2763 if not opts.get('close_branch'):
2796 2764 for r in parents:
2797 2765 if r.closesbranch() and r.branch() == branch:
2798 2766 repo.ui.status(_('reopening closed branch head %d\n') % r)
2799 2767
2800 2768 if repo.ui.debugflag:
2801 2769 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2802 2770 elif repo.ui.verbose:
2803 2771 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2804 2772
2805 2773 def revert(ui, repo, ctx, parents, *pats, **opts):
2806 2774 parent, p2 = parents
2807 2775 node = ctx.node()
2808 2776
2809 2777 mf = ctx.manifest()
2810 2778 if node == p2:
2811 2779 parent = p2
2812 2780 if node == parent:
2813 2781 pmf = mf
2814 2782 else:
2815 2783 pmf = None
2816 2784
2817 2785 # need all matching names in dirstate and manifest of target rev,
2818 2786 # so have to walk both. do not print errors if files exist in one
2819 2787 # but not the other. in both cases, filesets should be evaluated against
2820 2788 # workingctx to get consistent result (issue4497). this means 'set:**'
2821 2789 # cannot be used to select missing files from target rev.
2822 2790
2823 2791 # `names` is a mapping for all elements in working copy and target revision
2824 2792 # The mapping is in the form:
2825 2793 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2826 2794 names = {}
2827 2795
2828 2796 wlock = repo.wlock()
2829 2797 try:
2830 2798 ## filling of the `names` mapping
2831 2799 # walk dirstate to fill `names`
2832 2800
2833 2801 interactive = opts.get('interactive', False)
2834 2802 wctx = repo[None]
2835 2803 m = scmutil.match(wctx, pats, opts)
2836 2804
2837 2805 # we'll need this later
2838 2806 targetsubs = sorted(s for s in wctx.substate if m(s))
2839 2807
2840 2808 if not m.always():
2841 2809 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2842 2810 names[abs] = m.rel(abs), m.exact(abs)
2843 2811
2844 2812 # walk target manifest to fill `names`
2845 2813
2846 2814 def badfn(path, msg):
2847 2815 if path in names:
2848 2816 return
2849 2817 if path in ctx.substate:
2850 2818 return
2851 2819 path_ = path + '/'
2852 2820 for f in names:
2853 2821 if f.startswith(path_):
2854 2822 return
2855 2823 ui.warn("%s: %s\n" % (m.rel(path), msg))
2856 2824
2857 2825 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2858 2826 if abs not in names:
2859 2827 names[abs] = m.rel(abs), m.exact(abs)
2860 2828
2861 2829 # Find the status of all files in `names`.
2862 2830 m = scmutil.matchfiles(repo, names)
2863 2831
2864 2832 changes = repo.status(node1=node, match=m,
2865 2833 unknown=True, ignored=True, clean=True)
2866 2834 else:
2867 2835 changes = repo.status(node1=node, match=m)
2868 2836 for kind in changes:
2869 2837 for abs in kind:
2870 2838 names[abs] = m.rel(abs), m.exact(abs)
2871 2839
2872 2840 m = scmutil.matchfiles(repo, names)
2873 2841
2874 2842 modified = set(changes.modified)
2875 2843 added = set(changes.added)
2876 2844 removed = set(changes.removed)
2877 2845 _deleted = set(changes.deleted)
2878 2846 unknown = set(changes.unknown)
2879 2847 unknown.update(changes.ignored)
2880 2848 clean = set(changes.clean)
2881 2849 modadded = set()
2882 2850
2883 2851 # split between files known in target manifest and the others
2884 2852 smf = set(mf)
2885 2853
2886 2854 # determine the exact nature of the deleted files
2887 2855 deladded = _deleted - smf
2888 2856 deleted = _deleted - deladded
2889 2857
2890 2858 # We need to account for the state of the file in the dirstate,
2891 2859 # even when we revert against something other than the parent. This will
2892 2860 # slightly alter the behavior of revert (backing up or not, deleting
2893 2861 # or just forgetting, etc.).
2894 2862 if parent == node:
2895 2863 dsmodified = modified
2896 2864 dsadded = added
2897 2865 dsremoved = removed
2898 2866 # store all local modifications, useful later for rename detection
2899 2867 localchanges = dsmodified | dsadded
2900 2868 modified, added, removed = set(), set(), set()
2901 2869 else:
2902 2870 changes = repo.status(node1=parent, match=m)
2903 2871 dsmodified = set(changes.modified)
2904 2872 dsadded = set(changes.added)
2905 2873 dsremoved = set(changes.removed)
2906 2874 # store all local modifications, useful later for rename detection
2907 2875 localchanges = dsmodified | dsadded
2908 2876
2909 2877 # only take into account removes between wc and target
2910 2878 clean |= dsremoved - removed
2911 2879 dsremoved &= removed
2912 2880 # distinguish between dirstate removes and other removes
2913 2881 removed -= dsremoved
2914 2882
2915 2883 modadded = added & dsmodified
2916 2884 added -= modadded
2917 2885
2918 2886 # tell newly modified files apart.
2919 2887 dsmodified &= modified
2920 2888 dsmodified |= modified & dsadded # dirstate added may need backup
2921 2889 modified -= dsmodified
2922 2890
2923 2891 # We need to wait for some post-processing to update this set
2924 2892 # before making the distinction. The dirstate will be used for
2925 2893 # that purpose.
2926 2894 dsadded = added
2927 2895
2928 2896 # in case of merge, files that are actually added can be reported as
2929 2897 # modified; we need to post-process the result
2930 2898 if p2 != nullid:
2931 2899 if pmf is None:
2932 2900 # only need parent manifest in the merge case,
2933 2901 # so do not read by default
2934 2902 pmf = repo[parent].manifest()
2935 2903 mergeadd = dsmodified - set(pmf)
2936 2904 dsadded |= mergeadd
2937 2905 dsmodified -= mergeadd
2938 2906
2939 2907 # if f is a rename, update `names` to also revert the source
2940 2908 cwd = repo.getcwd()
2941 2909 for f in localchanges:
2942 2910 src = repo.dirstate.copied(f)
2943 2911 # XXX should we check for rename down to target node?
2944 2912 if src and src not in names and repo.dirstate[src] == 'r':
2945 2913 dsremoved.add(src)
2946 2914 names[src] = (repo.pathto(src, cwd), True)
2947 2915
2948 2916 # distinguish between files to forget and the others
2949 2917 added = set()
2950 2918 for abs in dsadded:
2951 2919 if repo.dirstate[abs] != 'a':
2952 2920 added.add(abs)
2953 2921 dsadded -= added
2954 2922
2955 2923 for abs in deladded:
2956 2924 if repo.dirstate[abs] == 'a':
2957 2925 dsadded.add(abs)
2958 2926 deladded -= dsadded
2959 2927
2960 2928 # For files marked as removed, we check if an unknown file is present at
2961 2929 # the same path. If such a file exists it may need to be backed up.
2962 2930 # Making the distinction at this stage helps keep the backup
2963 2931 # logic simple.
2964 2932 removunk = set()
2965 2933 for abs in removed:
2966 2934 target = repo.wjoin(abs)
2967 2935 if os.path.lexists(target):
2968 2936 removunk.add(abs)
2969 2937 removed -= removunk
2970 2938
2971 2939 dsremovunk = set()
2972 2940 for abs in dsremoved:
2973 2941 target = repo.wjoin(abs)
2974 2942 if os.path.lexists(target):
2975 2943 dsremovunk.add(abs)
2976 2944 dsremoved -= dsremovunk
2977 2945
2978 2946 # action to be actually performed by revert
2979 2947 # (<list of files>, <message>) tuple
2980 2948 actions = {'revert': ([], _('reverting %s\n')),
2981 2949 'add': ([], _('adding %s\n')),
2982 2950 'remove': ([], _('removing %s\n')),
2983 2951 'drop': ([], _('removing %s\n')),
2984 2952 'forget': ([], _('forgetting %s\n')),
2985 2953 'undelete': ([], _('undeleting %s\n')),
2986 2954 'noop': (None, _('no changes needed to %s\n')),
2987 2955 'unknown': (None, _('file not managed: %s\n')),
2988 2956 }
2989 2957
2990 2958 # "constant" that convey the backup strategy.
2991 2959 # All set to `discard` if `no-backup` is set to avoid checking
2992 2960 # no_backup lower in the code.
2993 2961 # These values are ordered for comparison purposes
2994 2962 backup = 2 # unconditionally do backup
2995 2963 check = 1 # check if the existing file differs from target
2996 2964 discard = 0 # never do backup
2997 2965 if opts.get('no_backup'):
2998 2966 backup = check = discard
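# Editor's note (illustrative): combined with the dispatch table below,
# entries using `backup` (e.g. dsmodified) are always backed up, entries
# using `check` (e.g. removunk) are backed up only when their content
# differs from the target, and --no-backup collapses every strategy to
# `discard` so the backup branch in the main loop never fires.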
2999 2967
3000 2968 backupanddel = actions['remove']
3001 2969 if not opts.get('no_backup'):
3002 2970 backupanddel = actions['drop']
3003 2971
3004 2972 disptable = (
3005 2973 # dispatch table:
3006 2974 # file state
3007 2975 # action
3008 2976 # make backup
3009 2977
3010 2978 ## Sets that will result in file changes on disk
3011 2979 # Modified compared to target, no local change
3012 2980 (modified, actions['revert'], discard),
3013 2981 # Modified compared to target, but local file is deleted
3014 2982 (deleted, actions['revert'], discard),
3015 2983 # Modified compared to target, local change
3016 2984 (dsmodified, actions['revert'], backup),
3017 2985 # Added since target
3018 2986 (added, actions['remove'], discard),
3019 2987 # Added in working directory
3020 2988 (dsadded, actions['forget'], discard),
3021 2989 # Added since target, have local modification
3022 2990 (modadded, backupanddel, backup),
3023 2991 # Added since target but file is missing in working directory
3024 2992 (deladded, actions['drop'], discard),
3025 2993 # Removed since target, before working copy parent
3026 2994 (removed, actions['add'], discard),
3027 2995 # Same as `removed` but an unknown file exists at the same path
3028 2996 (removunk, actions['add'], check),
3029 2997 # Removed since target, marked as such in working copy parent
3030 2998 (dsremoved, actions['undelete'], discard),
3031 2999 # Same as `dsremoved` but an unknown file exists at the same path
3032 3000 (dsremovunk, actions['undelete'], check),
3033 3001 ## the following sets do not result in any file changes
3034 3002 # File with no modification
3035 3003 (clean, actions['noop'], discard),
3036 3004 # Existing file, not tracked anywhere
3037 3005 (unknown, actions['unknown'], discard),
3038 3006 )
3039 3007
3040 3008 for abs, (rel, exact) in sorted(names.items()):
3041 3009 # target file to be touched on disk (relative to cwd)
3042 3010 target = repo.wjoin(abs)
3043 3011 # search the entry in the dispatch table.
3044 3012 # if the file is in any of these sets, it was touched in the working
3045 3013 # directory parent and we are sure it needs to be reverted.
3046 3014 for table, (xlist, msg), dobackup in disptable:
3047 3015 if abs not in table:
3048 3016 continue
3049 3017 if xlist is not None:
3050 3018 xlist.append(abs)
3051 3019 if dobackup and (backup <= dobackup
3052 3020 or wctx[abs].cmp(ctx[abs])):
3053 3021 bakname = "%s.orig" % rel
3054 3022 ui.note(_('saving current version of %s as %s\n') %
3055 3023 (rel, bakname))
3056 3024 if not opts.get('dry_run'):
3057 3025 if interactive:
3058 3026 util.copyfile(target, bakname)
3059 3027 else:
3060 3028 util.rename(target, bakname)
3061 3029 if ui.verbose or not exact:
3062 3030 if not isinstance(msg, basestring):
3063 3031 msg = msg(abs)
3064 3032 ui.status(msg % rel)
3065 3033 elif exact:
3066 3034 ui.warn(msg % rel)
3067 3035 break
3068 3036
3069 3037 if not opts.get('dry_run'):
3070 3038 needdata = ('revert', 'add', 'undelete')
3071 3039 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3072 3040 _performrevert(repo, parents, ctx, actions, interactive)
3073 3041
3074 3042 if targetsubs:
3075 3043 # Revert the subrepos on the revert list
3076 3044 for sub in targetsubs:
3077 3045 try:
3078 3046 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3079 3047 except KeyError:
3080 3048 raise util.Abort("subrepository '%s' does not exist in %s!"
3081 3049 % (sub, short(ctx.node())))
3082 3050 finally:
3083 3051 wlock.release()
3084 3052
3085 3053 def _revertprefetch(repo, ctx, *files):
3086 3054 """Let extension changing the storage layer prefetch content"""
3087 3055 pass
3088 3056
3089 3057 def _performrevert(repo, parents, ctx, actions, interactive=False):
3090 3058 """function that actually perform all the actions computed for revert
3091 3059
3092 3060 This is an independent function to let extensions plug in and react to
3093 3061 the imminent revert.
3094 3062
3095 3063 Make sure you have the working directory locked when calling this function.
3096 3064 """
3097 3065 parent, p2 = parents
3098 3066 node = ctx.node()
3099 3067 def checkout(f):
3100 3068 fc = ctx[f]
3101 3069 return repo.wwrite(f, fc.data(), fc.flags())
3102 3070
3103 3071 audit_path = pathutil.pathauditor(repo.root)
3104 3072 for f in actions['forget'][0]:
3105 3073 repo.dirstate.drop(f)
3106 3074 for f in actions['remove'][0]:
3107 3075 audit_path(f)
3108 3076 try:
3109 3077 util.unlinkpath(repo.wjoin(f))
3110 3078 except OSError:
3111 3079 pass
3112 3080 repo.dirstate.remove(f)
3113 3081 for f in actions['drop'][0]:
3114 3082 audit_path(f)
3115 3083 repo.dirstate.remove(f)
3116 3084
3117 3085 normal = None
3118 3086 if node == parent:
3119 3087 # We're reverting to our parent. If possible, we'd like status
3120 3088 # to report the file as clean. We have to use normallookup for
3121 3089 # merges to avoid losing information about merged/dirty files.
3122 3090 if p2 != nullid:
3123 3091 normal = repo.dirstate.normallookup
3124 3092 else:
3125 3093 normal = repo.dirstate.normal
3126 3094
3127 3095 newlyaddedandmodifiedfiles = set()
3128 3096 if interactive:
3129 3097 # Prompt the user for changes to revert
3130 3098 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3131 3099 m = scmutil.match(ctx, torevert, {})
3132 3100 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3133 3101 diffopts.nodates = True
3134 3102 diffopts.git = True
3135 3103 reversehunks = repo.ui.configbool('experimental',
3136 3104 'revertalternateinteractivemode',
3137 3105 False)
3138 3106 if reversehunks:
3139 3107 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3140 3108 else:
3141 3109 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3142 3110 originalchunks = patch.parsepatch(diff)
3143 3111
3144 3112 try:
3145 3113
3146 3114 chunks = recordfilter(repo.ui, originalchunks)
3147 3115 if reversehunks:
3148 3116 chunks = patch.reversehunks(chunks)
3149 3117
3150 3118 except patch.PatchError, err:
3151 3119 raise util.Abort(_('error parsing patch: %s') % err)
3152 3120
3153 3121 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3154 3122 # Apply changes
3155 3123 fp = cStringIO.StringIO()
3156 3124 for c in chunks:
3157 3125 c.write(fp)
3158 3126 dopatch = fp.tell()
3159 3127 fp.seek(0)
3160 3128 if dopatch:
3161 3129 try:
3162 3130 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3163 3131 except patch.PatchError, err:
3164 3132 raise util.Abort(str(err))
3165 3133 del fp
3166 3134 else:
3167 3135 for f in actions['revert'][0]:
3168 3136 wsize = checkout(f)
3169 3137 if normal:
3170 3138 normal(f)
3171 3139 elif wsize == repo.dirstate._map[f][2]:
3172 3140 # changes may be overlooked without normallookup,
3173 3141 # if size isn't changed at reverting
3174 3142 repo.dirstate.normallookup(f)
3175 3143
3176 3144 for f in actions['add'][0]:
3177 3145 # Don't checkout modified files, they are already created by the diff
3178 3146 if f not in newlyaddedandmodifiedfiles:
3179 3147 checkout(f)
3180 3148 repo.dirstate.add(f)
3181 3149
3182 3150 normal = repo.dirstate.normallookup
3183 3151 if node == parent and p2 == nullid:
3184 3152 normal = repo.dirstate.normal
3185 3153 for f in actions['undelete'][0]:
3186 3154 checkout(f)
3187 3155 normal(f)
3188 3156
3189 3157 copied = copies.pathcopies(repo[parent], ctx)
3190 3158
3191 3159 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3192 3160 if f in copied:
3193 3161 repo.dirstate.copy(copied[f], f)
3194 3162
3195 3163 def command(table):
3196 3164 """Returns a function object to be used as a decorator for making commands.
3197 3165
3198 3166 This function receives a command table as its argument. The table should
3199 3167 be a dict.
3200 3168
3201 3169 The returned function can be used as a decorator for adding commands
3202 3170 to that command table. This function accepts multiple arguments to define
3203 3171 a command.
3204 3172
3205 3173 The first argument is the command name.
3206 3174
3207 3175 The options argument is an iterable of tuples defining command arguments.
3208 3176 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3209 3177
3210 3178 The synopsis argument defines a short, one line summary of how to use the
3211 3179 command. This shows up in the help output.
3212 3180
3213 3181 The norepo argument defines whether the command does not require a
3214 3182 local repository. Most commands operate against a repository, thus the
3215 3183 default is False.
3216 3184
3217 3185 The optionalrepo argument defines whether the command optionally requires
3218 3186 a local repository.
3219 3187
3220 3188 The inferrepo argument defines whether to try to find a repository from the
3221 3189 command line arguments. If True, arguments will be examined for potential
3222 3190 repository locations. See ``findrepo()``. If a repository is found, it
3223 3191 will be used.
3224 3192 """
3225 3193 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3226 3194 inferrepo=False):
3227 3195 def decorator(func):
3228 3196 if synopsis:
3229 3197 table[name] = func, list(options), synopsis
3230 3198 else:
3231 3199 table[name] = func, list(options)
3232 3200
3233 3201 if norepo:
3234 3202 # Avoid import cycle.
3235 3203 import commands
3236 3204 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3237 3205
3238 3206 if optionalrepo:
3239 3207 import commands
3240 3208 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3241 3209
3242 3210 if inferrepo:
3243 3211 import commands
3244 3212 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3245 3213
3246 3214 return func
3247 3215 return decorator
3248 3216
3249 3217 return cmd
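
A rough sketch of how a third-party extension might use the returned decorator; the command name, option tuple, and synopsis below are hypothetical and not part of Mercurial's real command table:

    from mercurial import cmdutil
    from mercurial.i18n import _

    cmdtable = {}
    command = cmdutil.command(cmdtable)

    # option tuples follow the fancyopts format: (short, long, default, help)
    @command('hello', [('g', 'greeting', 'Hello', _('greeting to use'))],
             _('hg hello [-g TEXT]'))
    def hello(ui, repo, **opts):
        """print a greeting (hypothetical example command)"""
        ui.write('%s from %s\n' % (opts['greeting'], repo.root))
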
3250 3218
3251 3219 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3252 3220 # commands.outgoing. "missing" is "missing" of the result of
3253 3221 # "findcommonoutgoing()"
3254 3222 outgoinghooks = util.hooks()
3255 3223
3256 3224 # a list of (ui, repo) functions called by commands.summary
3257 3225 summaryhooks = util.hooks()
3258 3226
3259 3227 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3260 3228 #
3261 3229 # functions should return tuple of booleans below, if 'changes' is None:
3262 3230 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3263 3231 #
3264 3232 # otherwise, 'changes' is a tuple of tuples below:
3265 3233 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3266 3234 # - (desturl, destbranch, destpeer, outgoing)
3267 3235 summaryremotehooks = util.hooks()
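
As a sketch of how an extension could attach to one of these hook points (the extension and function names are made up, and this assumes the usual ``util.hooks.add(source, hook)`` registration):

    from mercurial import cmdutil

    def summaryhook(ui, repo):
        # hypothetical extra line appended to 'hg summary' output
        ui.note('myext: repository at %s\n' % repo.root)

    # matches the (ui, repo) signature noted for summaryhooks above
    cmdutil.summaryhooks.add('myext', summaryhook)
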
3268 3236
3269 3237 # A list of state files kept by multistep operations like graft.
3270 3238 # Since graft cannot be aborted, it is considered 'clearable' by update.
3271 3239 # note: bisect is intentionally excluded
3272 3240 # (state file, clearable, allowcommit, error, hint)
3273 3241 unfinishedstates = [
3274 3242 ('graftstate', True, False, _('graft in progress'),
3275 3243 _("use 'hg graft --continue' or 'hg update' to abort")),
3276 3244 ('updatestate', True, False, _('last update was interrupted'),
3277 3245 _("use 'hg update' to get a consistent checkout"))
3278 3246 ]
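
An extension implementing its own resumable, multistep command could register a state file in the same tuple format; the state file name, messages, and command below are hypothetical:

    from mercurial import cmdutil
    from mercurial.i18n import _

    # (state file, clearable, allowcommit, error, hint)
    cmdutil.unfinishedstates.append(
        ('myopstate', True, False, _('myop in progress'),
         _("use 'hg myop --continue' or 'hg update' to abort")))
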
3279 3247
3280 3248 def checkunfinished(repo, commit=False):
3281 3249 '''Look for an unfinished multistep operation, like graft, and abort
3282 3250 if found. It's probably good to check this right before
3283 3251 bailifchanged().
3284 3252 '''
3285 3253 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3286 3254 if commit and allowcommit:
3287 3255 continue
3288 3256 if repo.vfs.exists(f):
3289 3257 raise util.Abort(msg, hint=hint)
3290 3258
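A multistep command would typically guard itself along these lines before touching the working copy (a minimal sketch; the surrounding command is hypothetical):

    # at the start of a hypothetical 'hg myop' implementation
    cmdutil.checkunfinished(repo)   # abort if e.g. a graft is in progress
    cmdutil.bailifchanged(repo)     # refuse to run with uncommitted changes
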
3291 3259 def clearunfinished(repo):
3292 3260 '''Check for unfinished operations (as above), and clear the ones
3293 3261 that are clearable.
3294 3262 '''
3295 3263 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3296 3264 if not clearable and repo.vfs.exists(f):
3297 3265 raise util.Abort(msg, hint=hint)
3298 3266 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3299 3267 if clearable and repo.vfs.exists(f):
3300 3268 util.unlink(repo.join(f))
3301 3269
3302 3270 class dirstateguard(object):
3303 3271 '''Restore dirstate at unexpected failure.
3304 3272
3305 3273 At construction, this class does:
3306 3274
3307 3275 - write current ``repo.dirstate`` out, and
3308 3276 - save ``.hg/dirstate`` into the backup file
3309 3277
3310 3278 This restores ``.hg/dirstate`` from the backup file, if ``release()``
3311 3279 is invoked before ``close()``.
3312 3280
3313 3281 If ``close()`` is invoked before ``release()``, this just removes the backup file.
3314 3282 '''
3315 3283
3316 3284 def __init__(self, repo, name):
3317 3285 repo.dirstate.write()
3318 3286 self._repo = repo
3319 3287 self._filename = 'dirstate.backup.%s.%d' % (name, id(self))
3320 3288 repo.vfs.write(self._filename, repo.vfs.tryread('dirstate'))
3321 3289 self._active = True
3322 3290 self._closed = False
3323 3291
3324 3292 def __del__(self):
3325 3293 if self._active: # still active
3326 3294 # this may occur, even if this class is used correctly:
3327 3295 # for example, releasing other resources like transaction
3328 3296 # may raise exception before ``dirstateguard.release`` in
3329 3297 # ``release(tr, ....)``.
3330 3298 self._abort()
3331 3299
3332 3300 def close(self):
3333 3301 if not self._active: # already inactivated
3334 3302 msg = (_("can't close already inactivated backup: %s")
3335 3303 % self._filename)
3336 3304 raise util.Abort(msg)
3337 3305
3338 3306 self._repo.vfs.unlink(self._filename)
3339 3307 self._active = False
3340 3308 self._closed = True
3341 3309
3342 3310 def _abort(self):
3343 3311 # this "invalidate()" prevents "wlock.release()" from writing
3344 3312 # changes of dirstate out after restoring to original status
3345 3313 self._repo.dirstate.invalidate()
3346 3314
3347 3315 self._repo.vfs.rename(self._filename, 'dirstate')
3348 3316 self._active = False
3349 3317
3350 3318 def release(self):
3351 3319 if not self._closed:
3352 3320 if not self._active: # already inactivated
3353 3321 msg = (_("can't release already inactivated backup: %s")
3354 3322 % self._filename)
3355 3323 raise util.Abort(msg)
3356 3324 self._abort()
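
The intended calling pattern, sketched from the docstring above (the operation name and body are placeholders):

    dsguard = dirstateguard(repo, 'myoperation')
    try:
        # ... mutate the dirstate / working copy here ...
        dsguard.close()      # success: just drop the backup file
    finally:
        # if close() was never reached, this restores .hg/dirstate
        dsguard.release()
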
@@ -1,150 +1,188
1 1 # formatter.py - generic output formatting for mercurial
2 2 #
3 3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import cPickle
9 9 from node import hex, short
10 10 from i18n import _
11 11 import encoding, util
12 import templater
13 import os
12 14
13 15 class baseformatter(object):
14 16 def __init__(self, ui, topic, opts):
15 17 self._ui = ui
16 18 self._topic = topic
17 19 self._style = opts.get("style")
18 20 self._template = opts.get("template")
19 21 self._item = None
20 22 # function to convert node to string suitable for this output
21 23 self.hexfunc = hex
22 24 def __nonzero__(self):
23 25 '''return False if we're not doing real templating so we can
24 26 skip extra work'''
25 27 return True
26 28 def _showitem(self):
27 29 '''show a formatted item once all data is collected'''
28 30 pass
29 31 def startitem(self):
30 32 '''begin an item in the format list'''
31 33 if self._item is not None:
32 34 self._showitem()
33 35 self._item = {}
34 36 def data(self, **data):
35 37 '''insert data into item that's not shown in default output'''
36 38 self._item.update(data)
37 39 def write(self, fields, deftext, *fielddata, **opts):
38 40 '''do default text output while assigning data to item'''
39 41 for k, v in zip(fields.split(), fielddata):
40 42 self._item[k] = v
41 43 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
42 44 '''do conditional write (primarily for plain formatter)'''
43 45 for k, v in zip(fields.split(), fielddata):
44 46 self._item[k] = v
45 47 def plain(self, text, **opts):
46 48 '''show raw text for non-templated mode'''
47 49 pass
48 50 def end(self):
49 51 '''end output for the formatter'''
50 52 if self._item is not None:
51 53 self._showitem()
52 54
53 55 class plainformatter(baseformatter):
54 56 '''the default text output scheme'''
55 57 def __init__(self, ui, topic, opts):
56 58 baseformatter.__init__(self, ui, topic, opts)
57 59 if ui.debugflag:
58 60 self.hexfunc = hex
59 61 else:
60 62 self.hexfunc = short
61 63 def __nonzero__(self):
62 64 return False
63 65 def startitem(self):
64 66 pass
65 67 def data(self, **data):
66 68 pass
67 69 def write(self, fields, deftext, *fielddata, **opts):
68 70 self._ui.write(deftext % fielddata, **opts)
69 71 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
70 72 '''do conditional write'''
71 73 if cond:
72 74 self._ui.write(deftext % fielddata, **opts)
73 75 def plain(self, text, **opts):
74 76 self._ui.write(text, **opts)
75 77 def end(self):
76 78 pass
77 79
78 80 class debugformatter(baseformatter):
79 81 def __init__(self, ui, topic, opts):
80 82 baseformatter.__init__(self, ui, topic, opts)
81 83 self._ui.write("%s = [\n" % self._topic)
82 84 def _showitem(self):
83 85 self._ui.write(" " + repr(self._item) + ",\n")
84 86 def end(self):
85 87 baseformatter.end(self)
86 88 self._ui.write("]\n")
87 89
88 90 class pickleformatter(baseformatter):
89 91 def __init__(self, ui, topic, opts):
90 92 baseformatter.__init__(self, ui, topic, opts)
91 93 self._data = []
92 94 def _showitem(self):
93 95 self._data.append(self._item)
94 96 def end(self):
95 97 baseformatter.end(self)
96 98 self._ui.write(cPickle.dumps(self._data))
97 99
98 100 def _jsonifyobj(v):
99 101 if isinstance(v, tuple):
100 102 return '[' + ', '.join(_jsonifyobj(e) for e in v) + ']'
101 103 elif v is None:
102 104 return 'null'
103 105 elif v is True:
104 106 return 'true'
105 107 elif v is False:
106 108 return 'false'
107 109 elif isinstance(v, (int, float)):
108 110 return str(v)
109 111 else:
110 112 return '"%s"' % encoding.jsonescape(v)
111 113
112 114 class jsonformatter(baseformatter):
113 115 def __init__(self, ui, topic, opts):
114 116 baseformatter.__init__(self, ui, topic, opts)
115 117 self._ui.write("[")
116 118 self._ui._first = True
117 119 def _showitem(self):
118 120 if self._ui._first:
119 121 self._ui._first = False
120 122 else:
121 123 self._ui.write(",")
122 124
123 125 self._ui.write("\n {\n")
124 126 first = True
125 127 for k, v in sorted(self._item.items()):
126 128 if first:
127 129 first = False
128 130 else:
129 131 self._ui.write(",\n")
130 132 self._ui.write(' "%s": %s' % (k, _jsonifyobj(v)))
131 133 self._ui.write("\n }")
132 134 def end(self):
133 135 baseformatter.end(self)
134 136 self._ui.write("\n]\n")
135 137
138 def lookuptemplate(ui, topic, tmpl):
139 # looks like a literal template?
140 if '{' in tmpl:
141 return tmpl, None
142
143 # perhaps a stock style?
144 if not os.path.split(tmpl)[0]:
145 mapname = (templater.templatepath('map-cmdline.' + tmpl)
146 or templater.templatepath(tmpl))
147 if mapname and os.path.isfile(mapname):
148 return None, mapname
149
150 # perhaps it's a reference to [templates]
151 t = ui.config('templates', tmpl)
152 if t:
153 try:
154 tmpl = templater.unquotestring(t)
155 except SyntaxError:
156 tmpl = t
157 return tmpl, None
158
159 if tmpl == 'list':
160 ui.write(_("available styles: %s\n") % templater.stylelist())
161 raise util.Abort(_("specify a template"))
162
163 # perhaps it's a path to a map or a template
164 if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
165 # is it a mapfile for a style?
166 if os.path.basename(tmpl).startswith("map-"):
167 return None, os.path.realpath(tmpl)
168 tmpl = open(tmpl).read()
169 return tmpl, None
170
171 # constant string?
172 return tmpl, None
173
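To make the lookup order above concrete, here is roughly how a few typical values would resolve (the paths and config contents are illustrative only):

    # literal template: contains '{', returned as-is
    lookuptemplate(ui, 'changeset', '{rev}:{node|short}\n')
    #   -> ('{rev}:{node|short}\n', None)

    # stock style name: resolved to a shipped map-cmdline.* file
    lookuptemplate(ui, 'changeset', 'xml')
    #   -> (None, '.../templates/map-cmdline.xml')

    # key in the [templates] section of a config file, e.g.
    #   [templates]
    #   brief = "{rev}: {desc|firstline}\n"
    lookuptemplate(ui, 'changeset', 'brief')
    #   -> ('{rev}: {desc|firstline}\n', None)
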
136 174 def formatter(ui, topic, opts):
137 175 template = opts.get("template", "")
138 176 if template == "json":
139 177 return jsonformatter(ui, topic, opts)
140 178 elif template == "pickle":
141 179 return pickleformatter(ui, topic, opts)
142 180 elif template == "debug":
143 181 return debugformatter(ui, topic, opts)
144 182 elif template != "":
145 183 raise util.Abort(_("custom templates not yet supported"))
146 184 elif ui.configbool('ui', 'formatdebug'):
147 185 return debugformatter(ui, topic, opts)
148 186 elif ui.configbool('ui', 'formatjson'):
149 187 return jsonformatter(ui, topic, opts)
150 188 return plainformatter(ui, topic, opts)
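
A sketch of how a command body typically drives one of these formatters; the topic and field names are made up, and ``ctx`` is assumed to be a changectx already in scope. With the default plain formatter only the ``deftext`` strings are printed, while the json/debug/pickle formatters emit the collected fields instead:

    fm = formatter(ui, 'files', opts)       # opts may carry -T json, etc.
    for f in ctx.files():
        fm.startitem()
        fm.write('path', '%s\n', f)         # field name(s), plain format, values
        fm.data(size=len(ctx[f].data()))    # extra field for structured output
    fm.end()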