templatekw: port implementation of showparents() from changeset_templater...
Yuya Nishihara
r26435:882b170a default
@@ -1,3348 +1,3332 @@
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO, shutil
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import formatter
18 18 import crecord as crecordmod
19 19 import lock as lockmod
20 20
21 21 def ishunk(x):
22 22 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
23 23 return isinstance(x, hunkclasses)
24 24
25 25 def newandmodified(chunks, originalchunks):
26 26 newlyaddedandmodifiedfiles = set()
27 27 for chunk in chunks:
28 28 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
29 29 originalchunks:
30 30 newlyaddedandmodifiedfiles.add(chunk.header.filename())
31 31 return newlyaddedandmodifiedfiles
32 32
33 33 def parsealiases(cmd):
34 34 return cmd.lstrip("^").split("|")
35 35
36 36 def setupwrapcolorwrite(ui):
37 37 # wrap ui.write so diff output can be labeled/colorized
38 38 def wrapwrite(orig, *args, **kw):
39 39 label = kw.pop('label', '')
40 40 for chunk, l in patch.difflabel(lambda: args):
41 41 orig(chunk, label=label + l)
42 42
43 43 oldwrite = ui.write
44 44 def wrap(*args, **kwargs):
45 45 return wrapwrite(oldwrite, *args, **kwargs)
46 46 setattr(ui, 'write', wrap)
47 47 return oldwrite
48 48
49 49 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
50 50 if usecurses:
51 51 if testfile:
52 52 recordfn = crecordmod.testdecorator(testfile,
53 53 crecordmod.testchunkselector)
54 54 else:
55 55 recordfn = crecordmod.chunkselector
56 56
57 57 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
58 58
59 59 else:
60 60 return patch.filterpatch(ui, originalhunks, operation)
61 61
62 62 def recordfilter(ui, originalhunks, operation=None):
63 63 """ Prompts the user to filter the originalhunks and return a list of
64 64 selected hunks.
65 65         *operation* is used for ui purposes to indicate to the user
66 66         what kind of filtering they are doing: reverting, committing, shelving, etc.
67 67 *operation* has to be a translated string.
68 68 """
69 69 usecurses = ui.configbool('experimental', 'crecord', False)
70 70 testfile = ui.config('experimental', 'crecordtest', None)
71 71 oldwrite = setupwrapcolorwrite(ui)
72 72 try:
73 73 newchunks = filterchunks(ui, originalhunks, usecurses, testfile,
74 74 operation)
75 75 finally:
76 76 ui.write = oldwrite
77 77 return newchunks
78 78
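# Editor's note: illustrative sketch, not part of this changeset. recordfilter()
# above relies on a wrap-then-restore pattern around ui.write (set up by
# setupwrapcolorwrite(), undone in the finally block); the same idea in
# miniature, with a toy object standing in for the real ui:
class _toyui(object):
    def write(self, s):
        return s

_ui = _toyui()
_oldwrite = _ui.write
setattr(_ui, 'write', lambda s: _oldwrite('[labeled] ' + s))   # wrap
assert _ui.write('diff line') == '[labeled] diff line'
_ui.write = _oldwrite                                          # restore
assert _ui.write('diff line') == 'diff line'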
79 79 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
80 80 filterfn, *pats, **opts):
81 81 import merge as mergemod
82 82
83 83 if not ui.interactive():
84 84 if cmdsuggest:
85 85 msg = _('running non-interactively, use %s instead') % cmdsuggest
86 86 else:
87 87 msg = _('running non-interactively')
88 88 raise util.Abort(msg)
89 89
90 90 # make sure username is set before going interactive
91 91 if not opts.get('user'):
92 92 ui.username() # raise exception, username not provided
93 93
94 94 def recordfunc(ui, repo, message, match, opts):
95 95         """This is the generic record driver.
96 96
97 97 Its job is to interactively filter local changes, and
98 98         accordingly prepare the working directory into a state in which the
99 99 job can be delegated to a non-interactive commit command such as
100 100 'commit' or 'qrefresh'.
101 101
102 102         After the actual job is done by the non-interactive command, the
103 103 working directory is restored to its original state.
104 104
105 105 In the end we'll record interesting changes, and everything else
106 106 will be left in place, so the user can continue working.
107 107 """
108 108
109 109 checkunfinished(repo, commit=True)
110 110 merge = len(repo[None].parents()) > 1
111 111 if merge:
112 112 raise util.Abort(_('cannot partially commit a merge '
113 113 '(use "hg commit" instead)'))
114 114
115 115 status = repo.status(match=match)
116 116 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
117 117 diffopts.nodates = True
118 118 diffopts.git = True
119 119 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
120 120 originalchunks = patch.parsepatch(originaldiff)
121 121
122 122         # 1. filter patch, so we have an intending-to-apply subset of it
123 123 try:
124 124 chunks = filterfn(ui, originalchunks)
125 125 except patch.PatchError as err:
126 126 raise util.Abort(_('error parsing patch: %s') % err)
127 127
128 128 # We need to keep a backup of files that have been newly added and
129 129 # modified during the recording process because there is a previous
130 130 # version without the edit in the workdir
131 131 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
132 132 contenders = set()
133 133 for h in chunks:
134 134 try:
135 135 contenders.update(set(h.files()))
136 136 except AttributeError:
137 137 pass
138 138
139 139 changed = status.modified + status.added + status.removed
140 140 newfiles = [f for f in changed if f in contenders]
141 141 if not newfiles:
142 142 ui.status(_('no changes to record\n'))
143 143 return 0
144 144
145 145 modified = set(status.modified)
146 146
147 147 # 2. backup changed files, so we can restore them in the end
148 148
149 149 if backupall:
150 150 tobackup = changed
151 151 else:
152 152 tobackup = [f for f in newfiles if f in modified or f in \
153 153 newlyaddedandmodifiedfiles]
154 154 backups = {}
155 155 if tobackup:
156 156 backupdir = repo.join('record-backups')
157 157 try:
158 158 os.mkdir(backupdir)
159 159 except OSError as err:
160 160 if err.errno != errno.EEXIST:
161 161 raise
162 162 try:
163 163 # backup continues
164 164 for f in tobackup:
165 165 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
166 166 dir=backupdir)
167 167 os.close(fd)
168 168 ui.debug('backup %r as %r\n' % (f, tmpname))
169 169 util.copyfile(repo.wjoin(f), tmpname)
170 170 shutil.copystat(repo.wjoin(f), tmpname)
171 171 backups[f] = tmpname
172 172
173 173 fp = cStringIO.StringIO()
174 174 for c in chunks:
175 175 fname = c.filename()
176 176 if fname in backups:
177 177 c.write(fp)
178 178 dopatch = fp.tell()
179 179 fp.seek(0)
180 180
181 181 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
182 182 # 3a. apply filtered patch to clean repo (clean)
183 183 if backups:
184 184 # Equivalent to hg.revert
185 185 choices = lambda key: key in backups
186 186 mergemod.update(repo, repo.dirstate.p1(),
187 187 False, True, choices)
188 188
189 189 # 3b. (apply)
190 190 if dopatch:
191 191 try:
192 192 ui.debug('applying patch\n')
193 193 ui.debug(fp.getvalue())
194 194 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
195 195 except patch.PatchError as err:
196 196 raise util.Abort(str(err))
197 197 del fp
198 198
199 199             # 4. We prepared the working directory according to the filtered
200 200 # patch. Now is the time to delegate the job to
201 201 # commit/qrefresh or the like!
202 202
203 203 # Make all of the pathnames absolute.
204 204 newfiles = [repo.wjoin(nf) for nf in newfiles]
205 205 return commitfunc(ui, repo, *newfiles, **opts)
206 206 finally:
207 207 # 5. finally restore backed-up files
208 208 try:
209 209 dirstate = repo.dirstate
210 210 for realname, tmpname in backups.iteritems():
211 211 ui.debug('restoring %r to %r\n' % (tmpname, realname))
212 212
213 213 if dirstate[realname] == 'n':
214 214 # without normallookup, restoring timestamp
215 215 # may cause partially committed files
216 216 # to be treated as unmodified
217 217 dirstate.normallookup(realname)
218 218
219 219 util.copyfile(tmpname, repo.wjoin(realname))
220 220 # Our calls to copystat() here and above are a
221 221                 # hack to trick any editors that have f open into
222 222                 # thinking we haven't modified them.
223 223 #
224 224                 # Also note that this is racy as an editor could
225 225 # notice the file's mtime before we've finished
226 226 # writing it.
227 227 shutil.copystat(tmpname, repo.wjoin(realname))
228 228 os.unlink(tmpname)
229 229 if tobackup:
230 230 os.rmdir(backupdir)
231 231 except OSError:
232 232 pass
233 233
234 234 def recordinwlock(ui, repo, message, match, opts):
235 235 wlock = repo.wlock()
236 236 try:
237 237 return recordfunc(ui, repo, message, match, opts)
238 238 finally:
239 239 wlock.release()
240 240
241 241 return commit(ui, repo, recordinwlock, pats, opts)
242 242
243 243 def findpossible(cmd, table, strict=False):
244 244 """
245 245 Return cmd -> (aliases, command table entry)
246 246 for each matching command.
247 247 Return debug commands (or their aliases) only if no normal command matches.
248 248 """
249 249 choice = {}
250 250 debugchoice = {}
251 251
252 252 if cmd in table:
253 253 # short-circuit exact matches, "log" alias beats "^log|history"
254 254 keys = [cmd]
255 255 else:
256 256 keys = table.keys()
257 257
258 258 allcmds = []
259 259 for e in keys:
260 260 aliases = parsealiases(e)
261 261 allcmds.extend(aliases)
262 262 found = None
263 263 if cmd in aliases:
264 264 found = cmd
265 265 elif not strict:
266 266 for a in aliases:
267 267 if a.startswith(cmd):
268 268 found = a
269 269 break
270 270 if found is not None:
271 271 if aliases[0].startswith("debug") or found.startswith("debug"):
272 272 debugchoice[found] = (aliases, table[e])
273 273 else:
274 274 choice[found] = (aliases, table[e])
275 275
276 276 if not choice and debugchoice:
277 277 choice = debugchoice
278 278
279 279 return choice, allcmds
280 280
281 281 def findcmd(cmd, table, strict=True):
282 282 """Return (aliases, command table entry) for command string."""
283 283 choice, allcmds = findpossible(cmd, table, strict)
284 284
285 285 if cmd in choice:
286 286 return choice[cmd]
287 287
288 288 if len(choice) > 1:
289 289 clist = choice.keys()
290 290 clist.sort()
291 291 raise error.AmbiguousCommand(cmd, clist)
292 292
293 293 if choice:
294 294 return choice.values()[0]
295 295
296 296 raise error.UnknownCommand(cmd, allcmds)
297 297
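# Editor's note: illustrative sketch, not part of this changeset. It mimics the
# alias parsing and prefix matching that parsealiases()/findpossible() apply to
# a command table, using a toy table in place of the real one ("^log|history"
# style keys mapping to command entries):
def _toymatch(cmd, table):
    matches = []
    for key in table:
        aliases = key.lstrip("^").split("|")        # same split as parsealiases()
        if cmd in aliases or any(a.startswith(cmd) for a in aliases):
            matches.append(aliases[0])
    return matches

assert _toymatch('hist', {'^log|history': None, 'heads': None}) == ['log']
assert _toymatch('he', {'^log|history': None, 'heads': None}) == ['heads']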
298 298 def findrepo(p):
299 299 while not os.path.isdir(os.path.join(p, ".hg")):
300 300 oldp, p = p, os.path.dirname(p)
301 301 if p == oldp:
302 302 return None
303 303
304 304 return p
305 305
306 306 def bailifchanged(repo, merge=True):
307 307 if merge and repo.dirstate.p2() != nullid:
308 308 raise util.Abort(_('outstanding uncommitted merge'))
309 309 modified, added, removed, deleted = repo.status()[:4]
310 310 if modified or added or removed or deleted:
311 311 raise util.Abort(_('uncommitted changes'))
312 312 ctx = repo[None]
313 313 for s in sorted(ctx.substate):
314 314 ctx.sub(s).bailifchanged()
315 315
316 316 def logmessage(ui, opts):
317 317     """ get the log message according to the -m and -l options """
318 318 message = opts.get('message')
319 319 logfile = opts.get('logfile')
320 320
321 321 if message and logfile:
322 322 raise util.Abort(_('options --message and --logfile are mutually '
323 323 'exclusive'))
324 324 if not message and logfile:
325 325 try:
326 326 if logfile == '-':
327 327 message = ui.fin.read()
328 328 else:
329 329 message = '\n'.join(util.readfile(logfile).splitlines())
330 330 except IOError as inst:
331 331 raise util.Abort(_("can't read commit message '%s': %s") %
332 332 (logfile, inst.strerror))
333 333 return message
334 334
335 335 def mergeeditform(ctxorbool, baseformname):
336 336 """return appropriate editform name (referencing a committemplate)
337 337
338 338 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
339 339     a merge is being committed.
340 340
341 341 This returns baseformname with '.merge' appended if it is a merge,
342 342 otherwise '.normal' is appended.
343 343 """
344 344 if isinstance(ctxorbool, bool):
345 345 if ctxorbool:
346 346 return baseformname + ".merge"
347 347 elif 1 < len(ctxorbool.parents()):
348 348 return baseformname + ".merge"
349 349
350 350 return baseformname + ".normal"
351 351
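# Editor's note: illustrative check, not part of this changeset; it exercises
# the bool form of mergeeditform() defined above (the ctx form inspects
# len(ctx.parents()) instead of the flag):
assert mergeeditform(True, 'import.normal') == 'import.normal.merge'
assert mergeeditform(False, 'import.normal') == 'import.normal.normal'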
352 352 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
353 353 editform='', **opts):
354 354 """get appropriate commit message editor according to '--edit' option
355 355
356 356     'finishdesc' is a function to be called with the edited commit message
357 357     (= 'description' of the new changeset) just after editing, but
358 358     before checking emptiness. It should return the actual text to be
359 359     stored into history. This allows the description to be changed before
360 360     storing.
361 361
362 362     'extramsg' is an extra message to be shown in the editor instead of
363 363     the 'Leave message empty to abort commit' line. The 'HG: ' prefix and
364 364     EOL are automatically added.
365 365
366 366 'editform' is a dot-separated list of names, to distinguish
367 367 the purpose of commit text editing.
368 368
369 369 'getcommiteditor' returns 'commitforceeditor' regardless of
370 370 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
371 371     they are specific to usage in MQ.
372 372 """
373 373 if edit or finishdesc or extramsg:
374 374 return lambda r, c, s: commitforceeditor(r, c, s,
375 375 finishdesc=finishdesc,
376 376 extramsg=extramsg,
377 377 editform=editform)
378 378 elif editform:
379 379 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
380 380 else:
381 381 return commiteditor
382 382
383 383 def loglimit(opts):
384 384 """get the log limit according to option -l/--limit"""
385 385 limit = opts.get('limit')
386 386 if limit:
387 387 try:
388 388 limit = int(limit)
389 389 except ValueError:
390 390 raise util.Abort(_('limit must be a positive integer'))
391 391 if limit <= 0:
392 392 raise util.Abort(_('limit must be positive'))
393 393 else:
394 394 limit = None
395 395 return limit
396 396
397 397 def makefilename(repo, pat, node, desc=None,
398 398 total=None, seqno=None, revwidth=None, pathname=None):
399 399 node_expander = {
400 400 'H': lambda: hex(node),
401 401 'R': lambda: str(repo.changelog.rev(node)),
402 402 'h': lambda: short(node),
403 403 'm': lambda: re.sub('[^\w]', '_', str(desc))
404 404 }
405 405 expander = {
406 406 '%': lambda: '%',
407 407 'b': lambda: os.path.basename(repo.root),
408 408 }
409 409
410 410 try:
411 411 if node:
412 412 expander.update(node_expander)
413 413 if node:
414 414 expander['r'] = (lambda:
415 415 str(repo.changelog.rev(node)).zfill(revwidth or 0))
416 416 if total is not None:
417 417 expander['N'] = lambda: str(total)
418 418 if seqno is not None:
419 419 expander['n'] = lambda: str(seqno)
420 420 if total is not None and seqno is not None:
421 421 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
422 422 if pathname is not None:
423 423 expander['s'] = lambda: os.path.basename(pathname)
424 424 expander['d'] = lambda: os.path.dirname(pathname) or '.'
425 425 expander['p'] = lambda: pathname
426 426
427 427 newname = []
428 428 patlen = len(pat)
429 429 i = 0
430 430 while i < patlen:
431 431 c = pat[i]
432 432 if c == '%':
433 433 i += 1
434 434 c = pat[i]
435 435 c = expander[c]()
436 436 newname.append(c)
437 437 i += 1
438 438 return ''.join(newname)
439 439 except KeyError as inst:
440 440 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
441 441 inst.args[0])
442 442
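# Editor's note: illustrative sketch, not part of this changeset. It reproduces
# the character-by-character '%' expansion performed by makefilename() with a
# toy expander table, so the format-spec handling can be seen without a repo:
def _toyexpand(pat, expander):
    out, i = [], 0
    while i < len(pat):
        c = pat[i]
        if c == '%':
            i += 1
            c = expander[pat[i]]()
        out.append(c)
        i += 1
    return ''.join(out)

assert _toyexpand('hg-%h.patch', {'h': lambda: 'abc123'}) == 'hg-abc123.patch'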
443 443 def makefileobj(repo, pat, node=None, desc=None, total=None,
444 444 seqno=None, revwidth=None, mode='wb', modemap=None,
445 445 pathname=None):
446 446
447 447 writable = mode not in ('r', 'rb')
448 448
449 449 if not pat or pat == '-':
450 450 if writable:
451 451 fp = repo.ui.fout
452 452 else:
453 453 fp = repo.ui.fin
454 454 if util.safehasattr(fp, 'fileno'):
455 455 return os.fdopen(os.dup(fp.fileno()), mode)
456 456 else:
457 457 # if this fp can't be duped properly, return
458 458 # a dummy object that can be closed
459 459 class wrappedfileobj(object):
460 460 noop = lambda x: None
461 461 def __init__(self, f):
462 462 self.f = f
463 463 def __getattr__(self, attr):
464 464 if attr == 'close':
465 465 return self.noop
466 466 else:
467 467 return getattr(self.f, attr)
468 468
469 469 return wrappedfileobj(fp)
470 470 if util.safehasattr(pat, 'write') and writable:
471 471 return pat
472 472 if util.safehasattr(pat, 'read') and 'r' in mode:
473 473 return pat
474 474 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
475 475 if modemap is not None:
476 476 mode = modemap.get(fn, mode)
477 477 if mode == 'wb':
478 478 modemap[fn] = 'ab'
479 479 return open(fn, mode)
480 480
481 481 def openrevlog(repo, cmd, file_, opts):
482 482 """opens the changelog, manifest, a filelog or a given revlog"""
483 483 cl = opts['changelog']
484 484 mf = opts['manifest']
485 485 dir = opts['dir']
486 486 msg = None
487 487 if cl and mf:
488 488 msg = _('cannot specify --changelog and --manifest at the same time')
489 489 elif cl and dir:
490 490 msg = _('cannot specify --changelog and --dir at the same time')
491 491 elif cl or mf:
492 492 if file_:
493 493 msg = _('cannot specify filename with --changelog or --manifest')
494 494 elif not repo:
495 495 msg = _('cannot specify --changelog or --manifest or --dir '
496 496 'without a repository')
497 497 if msg:
498 498 raise util.Abort(msg)
499 499
500 500 r = None
501 501 if repo:
502 502 if cl:
503 503 r = repo.unfiltered().changelog
504 504 elif dir:
505 505 if 'treemanifest' not in repo.requirements:
506 506 raise util.Abort(_("--dir can only be used on repos with "
507 507 "treemanifest enabled"))
508 508 dirlog = repo.dirlog(file_)
509 509 if len(dirlog):
510 510 r = dirlog
511 511 elif mf:
512 512 r = repo.manifest
513 513 elif file_:
514 514 filelog = repo.file(file_)
515 515 if len(filelog):
516 516 r = filelog
517 517 if not r:
518 518 if not file_:
519 519 raise error.CommandError(cmd, _('invalid arguments'))
520 520 if not os.path.isfile(file_):
521 521 raise util.Abort(_("revlog '%s' not found") % file_)
522 522 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
523 523 file_[:-2] + ".i")
524 524 return r
525 525
526 526 def copy(ui, repo, pats, opts, rename=False):
527 527 # called with the repo lock held
528 528 #
529 529 # hgsep => pathname that uses "/" to separate directories
530 530 # ossep => pathname that uses os.sep to separate directories
531 531 cwd = repo.getcwd()
532 532 targets = {}
533 533 after = opts.get("after")
534 534 dryrun = opts.get("dry_run")
535 535 wctx = repo[None]
536 536
537 537 def walkpat(pat):
538 538 srcs = []
539 539 if after:
540 540 badstates = '?'
541 541 else:
542 542 badstates = '?r'
543 543 m = scmutil.match(repo[None], [pat], opts, globbed=True)
544 544 for abs in repo.walk(m):
545 545 state = repo.dirstate[abs]
546 546 rel = m.rel(abs)
547 547 exact = m.exact(abs)
548 548 if state in badstates:
549 549 if exact and state == '?':
550 550 ui.warn(_('%s: not copying - file is not managed\n') % rel)
551 551 if exact and state == 'r':
552 552 ui.warn(_('%s: not copying - file has been marked for'
553 553 ' remove\n') % rel)
554 554 continue
555 555 # abs: hgsep
556 556 # rel: ossep
557 557 srcs.append((abs, rel, exact))
558 558 return srcs
559 559
560 560 # abssrc: hgsep
561 561 # relsrc: ossep
562 562 # otarget: ossep
563 563 def copyfile(abssrc, relsrc, otarget, exact):
564 564 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
565 565 if '/' in abstarget:
566 566             # We cannot normalize abstarget itself, as this would prevent
567 567             # case-only renames, like a => A.
568 568 abspath, absname = abstarget.rsplit('/', 1)
569 569 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
570 570 reltarget = repo.pathto(abstarget, cwd)
571 571 target = repo.wjoin(abstarget)
572 572 src = repo.wjoin(abssrc)
573 573 state = repo.dirstate[abstarget]
574 574
575 575 scmutil.checkportable(ui, abstarget)
576 576
577 577 # check for collisions
578 578 prevsrc = targets.get(abstarget)
579 579 if prevsrc is not None:
580 580 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
581 581 (reltarget, repo.pathto(abssrc, cwd),
582 582 repo.pathto(prevsrc, cwd)))
583 583 return
584 584
585 585 # check for overwrites
586 586 exists = os.path.lexists(target)
587 587 samefile = False
588 588 if exists and abssrc != abstarget:
589 589 if (repo.dirstate.normalize(abssrc) ==
590 590 repo.dirstate.normalize(abstarget)):
591 591 if not rename:
592 592 ui.warn(_("%s: can't copy - same file\n") % reltarget)
593 593 return
594 594 exists = False
595 595 samefile = True
596 596
597 597 if not after and exists or after and state in 'mn':
598 598 if not opts['force']:
599 599 ui.warn(_('%s: not overwriting - file exists\n') %
600 600 reltarget)
601 601 return
602 602
603 603 if after:
604 604 if not exists:
605 605 if rename:
606 606 ui.warn(_('%s: not recording move - %s does not exist\n') %
607 607 (relsrc, reltarget))
608 608 else:
609 609 ui.warn(_('%s: not recording copy - %s does not exist\n') %
610 610 (relsrc, reltarget))
611 611 return
612 612 elif not dryrun:
613 613 try:
614 614 if exists:
615 615 os.unlink(target)
616 616 targetdir = os.path.dirname(target) or '.'
617 617 if not os.path.isdir(targetdir):
618 618 os.makedirs(targetdir)
619 619 if samefile:
620 620 tmp = target + "~hgrename"
621 621 os.rename(src, tmp)
622 622 os.rename(tmp, target)
623 623 else:
624 624 util.copyfile(src, target)
625 625 srcexists = True
626 626 except IOError as inst:
627 627 if inst.errno == errno.ENOENT:
628 628 ui.warn(_('%s: deleted in working directory\n') % relsrc)
629 629 srcexists = False
630 630 else:
631 631 ui.warn(_('%s: cannot copy - %s\n') %
632 632 (relsrc, inst.strerror))
633 633 return True # report a failure
634 634
635 635 if ui.verbose or not exact:
636 636 if rename:
637 637 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
638 638 else:
639 639 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
640 640
641 641 targets[abstarget] = abssrc
642 642
643 643 # fix up dirstate
644 644 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
645 645 dryrun=dryrun, cwd=cwd)
646 646 if rename and not dryrun:
647 647 if not after and srcexists and not samefile:
648 648 util.unlinkpath(repo.wjoin(abssrc))
649 649 wctx.forget([abssrc])
650 650
651 651 # pat: ossep
652 652 # dest ossep
653 653 # srcs: list of (hgsep, hgsep, ossep, bool)
654 654 # return: function that takes hgsep and returns ossep
655 655 def targetpathfn(pat, dest, srcs):
656 656 if os.path.isdir(pat):
657 657 abspfx = pathutil.canonpath(repo.root, cwd, pat)
658 658 abspfx = util.localpath(abspfx)
659 659 if destdirexists:
660 660 striplen = len(os.path.split(abspfx)[0])
661 661 else:
662 662 striplen = len(abspfx)
663 663 if striplen:
664 664 striplen += len(os.sep)
665 665 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
666 666 elif destdirexists:
667 667 res = lambda p: os.path.join(dest,
668 668 os.path.basename(util.localpath(p)))
669 669 else:
670 670 res = lambda p: dest
671 671 return res
672 672
673 673 # pat: ossep
674 674 # dest ossep
675 675 # srcs: list of (hgsep, hgsep, ossep, bool)
676 676 # return: function that takes hgsep and returns ossep
677 677 def targetpathafterfn(pat, dest, srcs):
678 678 if matchmod.patkind(pat):
679 679 # a mercurial pattern
680 680 res = lambda p: os.path.join(dest,
681 681 os.path.basename(util.localpath(p)))
682 682 else:
683 683 abspfx = pathutil.canonpath(repo.root, cwd, pat)
684 684 if len(abspfx) < len(srcs[0][0]):
685 685 # A directory. Either the target path contains the last
686 686 # component of the source path or it does not.
687 687 def evalpath(striplen):
688 688 score = 0
689 689 for s in srcs:
690 690 t = os.path.join(dest, util.localpath(s[0])[striplen:])
691 691 if os.path.lexists(t):
692 692 score += 1
693 693 return score
694 694
695 695 abspfx = util.localpath(abspfx)
696 696 striplen = len(abspfx)
697 697 if striplen:
698 698 striplen += len(os.sep)
699 699 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
700 700 score = evalpath(striplen)
701 701 striplen1 = len(os.path.split(abspfx)[0])
702 702 if striplen1:
703 703 striplen1 += len(os.sep)
704 704 if evalpath(striplen1) > score:
705 705 striplen = striplen1
706 706 res = lambda p: os.path.join(dest,
707 707 util.localpath(p)[striplen:])
708 708 else:
709 709 # a file
710 710 if destdirexists:
711 711 res = lambda p: os.path.join(dest,
712 712 os.path.basename(util.localpath(p)))
713 713 else:
714 714 res = lambda p: dest
715 715 return res
716 716
717 717 pats = scmutil.expandpats(pats)
718 718 if not pats:
719 719 raise util.Abort(_('no source or destination specified'))
720 720 if len(pats) == 1:
721 721 raise util.Abort(_('no destination specified'))
722 722 dest = pats.pop()
723 723 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
724 724 if not destdirexists:
725 725 if len(pats) > 1 or matchmod.patkind(pats[0]):
726 726 raise util.Abort(_('with multiple sources, destination must be an '
727 727 'existing directory'))
728 728 if util.endswithsep(dest):
729 729 raise util.Abort(_('destination %s is not a directory') % dest)
730 730
731 731 tfn = targetpathfn
732 732 if after:
733 733 tfn = targetpathafterfn
734 734 copylist = []
735 735 for pat in pats:
736 736 srcs = walkpat(pat)
737 737 if not srcs:
738 738 continue
739 739 copylist.append((tfn(pat, dest, srcs), srcs))
740 740 if not copylist:
741 741 raise util.Abort(_('no files to copy'))
742 742
743 743 errors = 0
744 744 for targetpath, srcs in copylist:
745 745 for abssrc, relsrc, exact in srcs:
746 746 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
747 747 errors += 1
748 748
749 749 if errors:
750 750 ui.warn(_('(consider using --after)\n'))
751 751
752 752 return errors != 0
753 753
754 754 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
755 755 runargs=None, appendpid=False):
756 756 '''Run a command as a service.'''
757 757
758 758 def writepid(pid):
759 759 if opts['pid_file']:
760 760 if appendpid:
761 761 mode = 'a'
762 762 else:
763 763 mode = 'w'
764 764 fp = open(opts['pid_file'], mode)
765 765 fp.write(str(pid) + '\n')
766 766 fp.close()
767 767
768 768 if opts['daemon'] and not opts['daemon_pipefds']:
769 769 # Signal child process startup with file removal
770 770 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
771 771 os.close(lockfd)
772 772 try:
773 773 if not runargs:
774 774 runargs = util.hgcmd() + sys.argv[1:]
775 775 runargs.append('--daemon-pipefds=%s' % lockpath)
776 776 # Don't pass --cwd to the child process, because we've already
777 777 # changed directory.
778 778 for i in xrange(1, len(runargs)):
779 779 if runargs[i].startswith('--cwd='):
780 780 del runargs[i]
781 781 break
782 782 elif runargs[i].startswith('--cwd'):
783 783 del runargs[i:i + 2]
784 784 break
785 785 def condfn():
786 786 return not os.path.exists(lockpath)
787 787 pid = util.rundetached(runargs, condfn)
788 788 if pid < 0:
789 789 raise util.Abort(_('child process failed to start'))
790 790 writepid(pid)
791 791 finally:
792 792 try:
793 793 os.unlink(lockpath)
794 794 except OSError as e:
795 795 if e.errno != errno.ENOENT:
796 796 raise
797 797 if parentfn:
798 798 return parentfn(pid)
799 799 else:
800 800 return
801 801
802 802 if initfn:
803 803 initfn()
804 804
805 805 if not opts['daemon']:
806 806 writepid(os.getpid())
807 807
808 808 if opts['daemon_pipefds']:
809 809 lockpath = opts['daemon_pipefds']
810 810 try:
811 811 os.setsid()
812 812 except AttributeError:
813 813 pass
814 814 os.unlink(lockpath)
815 815 util.hidewindow()
816 816 sys.stdout.flush()
817 817 sys.stderr.flush()
818 818
819 819 nullfd = os.open(os.devnull, os.O_RDWR)
820 820 logfilefd = nullfd
821 821 if logfile:
822 822 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
823 823 os.dup2(nullfd, 0)
824 824 os.dup2(logfilefd, 1)
825 825 os.dup2(logfilefd, 2)
826 826 if nullfd not in (0, 1, 2):
827 827 os.close(nullfd)
828 828 if logfile and logfilefd not in (0, 1, 2):
829 829 os.close(logfilefd)
830 830
831 831 if runfn:
832 832 return runfn()
833 833
834 834 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
835 835 """Utility function used by commands.import to import a single patch
836 836
837 837     This function is explicitly defined here to help the evolve extension
838 838 wrap this part of the import logic.
839 839
840 840     The API is currently a bit ugly because it is a simple code translation from
841 841 the import command. Feel free to make it better.
842 842
843 843 :hunk: a patch (as a binary string)
844 844     :parents: nodes that will be parents of the created commit
845 845     :opts: the full dict of options passed to the import command
846 846     :msgs: list to save the commit message to.
847 847            (used in case we need to save it when failing)
848 848     :updatefunc: a function that updates a repo to a given node
849 849 updatefunc(<repo>, <node>)
850 850 """
851 851 # avoid cycle context -> subrepo -> cmdutil
852 852 import context
853 853 tmpname, message, user, date, branch, nodeid, p1, p2 = \
854 854 patch.extract(ui, hunk)
855 855
856 856 update = not opts.get('bypass')
857 857 strip = opts["strip"]
858 858 prefix = opts["prefix"]
859 859 sim = float(opts.get('similarity') or 0)
860 860 if not tmpname:
861 861 return (None, None, False)
862 862 msg = _('applied to working directory')
863 863
864 864 rejects = False
865 865 dsguard = None
866 866
867 867 try:
868 868 cmdline_message = logmessage(ui, opts)
869 869 if cmdline_message:
870 870 # pickup the cmdline msg
871 871 message = cmdline_message
872 872 elif message:
873 873 # pickup the patch msg
874 874 message = message.strip()
875 875 else:
876 876 # launch the editor
877 877 message = None
878 878 ui.debug('message:\n%s\n' % message)
879 879
880 880 if len(parents) == 1:
881 881 parents.append(repo[nullid])
882 882 if opts.get('exact'):
883 883 if not nodeid or not p1:
884 884 raise util.Abort(_('not a Mercurial patch'))
885 885 p1 = repo[p1]
886 886 p2 = repo[p2 or nullid]
887 887 elif p2:
888 888 try:
889 889 p1 = repo[p1]
890 890 p2 = repo[p2]
891 891 # Without any options, consider p2 only if the
892 892 # patch is being applied on top of the recorded
893 893 # first parent.
894 894 if p1 != parents[0]:
895 895 p1 = parents[0]
896 896 p2 = repo[nullid]
897 897 except error.RepoError:
898 898 p1, p2 = parents
899 899 if p2.node() == nullid:
900 900 ui.warn(_("warning: import the patch as a normal revision\n"
901 901 "(use --exact to import the patch as a merge)\n"))
902 902 else:
903 903 p1, p2 = parents
904 904
905 905 n = None
906 906 if update:
907 907 dsguard = dirstateguard(repo, 'tryimportone')
908 908 if p1 != parents[0]:
909 909 updatefunc(repo, p1.node())
910 910 if p2 != parents[1]:
911 911 repo.setparents(p1.node(), p2.node())
912 912
913 913 if opts.get('exact') or opts.get('import_branch'):
914 914 repo.dirstate.setbranch(branch or 'default')
915 915
916 916 partial = opts.get('partial', False)
917 917 files = set()
918 918 try:
919 919 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
920 920 files=files, eolmode=None, similarity=sim / 100.0)
921 921 except patch.PatchError as e:
922 922 if not partial:
923 923 raise util.Abort(str(e))
924 924 if partial:
925 925 rejects = True
926 926
927 927 files = list(files)
928 928 if opts.get('no_commit'):
929 929 if message:
930 930 msgs.append(message)
931 931 else:
932 932 if opts.get('exact') or p2:
933 933                 # If you got here, you either used --force and know what
934 934 # you are doing or used --exact or a merge patch while
935 935 # being updated to its first parent.
936 936 m = None
937 937 else:
938 938 m = scmutil.matchfiles(repo, files or [])
939 939 editform = mergeeditform(repo[None], 'import.normal')
940 940 if opts.get('exact'):
941 941 editor = None
942 942 else:
943 943 editor = getcommiteditor(editform=editform, **opts)
944 944 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
945 945 try:
946 946 if partial:
947 947 repo.ui.setconfig('ui', 'allowemptycommit', True)
948 948 n = repo.commit(message, opts.get('user') or user,
949 949 opts.get('date') or date, match=m,
950 950 editor=editor)
951 951 finally:
952 952 repo.ui.restoreconfig(allowemptyback)
953 953 dsguard.close()
954 954 else:
955 955 if opts.get('exact') or opts.get('import_branch'):
956 956 branch = branch or 'default'
957 957 else:
958 958 branch = p1.branch()
959 959 store = patch.filestore()
960 960 try:
961 961 files = set()
962 962 try:
963 963 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
964 964 files, eolmode=None)
965 965 except patch.PatchError as e:
966 966 raise util.Abort(str(e))
967 967 if opts.get('exact'):
968 968 editor = None
969 969 else:
970 970 editor = getcommiteditor(editform='import.bypass')
971 971 memctx = context.makememctx(repo, (p1.node(), p2.node()),
972 972 message,
973 973 opts.get('user') or user,
974 974 opts.get('date') or date,
975 975 branch, files, store,
976 976 editor=editor)
977 977 n = memctx.commit()
978 978 finally:
979 979 store.close()
980 980 if opts.get('exact') and opts.get('no_commit'):
981 981 # --exact with --no-commit is still useful in that it does merge
982 982 # and branch bits
983 983 ui.warn(_("warning: can't check exact import with --no-commit\n"))
984 984 elif opts.get('exact') and hex(n) != nodeid:
985 985 raise util.Abort(_('patch is damaged or loses information'))
986 986 if n:
987 987 # i18n: refers to a short changeset id
988 988 msg = _('created %s') % short(n)
989 989 return (msg, n, rejects)
990 990 finally:
991 991 lockmod.release(dsguard)
992 992 os.unlink(tmpname)
993 993
994 994 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
995 995 opts=None, match=None):
996 996 '''export changesets as hg patches.'''
997 997
998 998 total = len(revs)
999 999 revwidth = max([len(str(rev)) for rev in revs])
1000 1000 filemode = {}
1001 1001
1002 1002 def single(rev, seqno, fp):
1003 1003 ctx = repo[rev]
1004 1004 node = ctx.node()
1005 1005 parents = [p.node() for p in ctx.parents() if p]
1006 1006 branch = ctx.branch()
1007 1007 if switch_parent:
1008 1008 parents.reverse()
1009 1009
1010 1010 if parents:
1011 1011 prev = parents[0]
1012 1012 else:
1013 1013 prev = nullid
1014 1014
1015 1015 shouldclose = False
1016 1016 if not fp and len(template) > 0:
1017 1017 desc_lines = ctx.description().rstrip().split('\n')
1018 1018             desc = desc_lines[0] # Commit always has a first line.
1019 1019 fp = makefileobj(repo, template, node, desc=desc, total=total,
1020 1020 seqno=seqno, revwidth=revwidth, mode='wb',
1021 1021 modemap=filemode)
1022 1022 if fp != template:
1023 1023 shouldclose = True
1024 1024 if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
1025 1025 repo.ui.note("%s\n" % fp.name)
1026 1026
1027 1027 if not fp:
1028 1028 write = repo.ui.write
1029 1029 else:
1030 1030 def write(s, **kw):
1031 1031 fp.write(s)
1032 1032
1033 1033 write("# HG changeset patch\n")
1034 1034 write("# User %s\n" % ctx.user())
1035 1035 write("# Date %d %d\n" % ctx.date())
1036 1036 write("# %s\n" % util.datestr(ctx.date()))
1037 1037 if branch and branch != 'default':
1038 1038 write("# Branch %s\n" % branch)
1039 1039 write("# Node ID %s\n" % hex(node))
1040 1040 write("# Parent %s\n" % hex(prev))
1041 1041 if len(parents) > 1:
1042 1042 write("# Parent %s\n" % hex(parents[1]))
1043 1043 write(ctx.description().rstrip())
1044 1044 write("\n\n")
1045 1045
1046 1046 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1047 1047 write(chunk, label=label)
1048 1048
1049 1049 if shouldclose:
1050 1050 fp.close()
1051 1051
1052 1052 for seqno, rev in enumerate(revs):
1053 1053 single(rev, seqno + 1, fp)
1054 1054
1055 1055 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1056 1056 changes=None, stat=False, fp=None, prefix='',
1057 1057 root='', listsubrepos=False):
1058 1058 '''show diff or diffstat.'''
1059 1059 if fp is None:
1060 1060 write = ui.write
1061 1061 else:
1062 1062 def write(s, **kw):
1063 1063 fp.write(s)
1064 1064
1065 1065 if root:
1066 1066 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1067 1067 else:
1068 1068 relroot = ''
1069 1069 if relroot != '':
1070 1070 # XXX relative roots currently don't work if the root is within a
1071 1071 # subrepo
1072 1072 uirelroot = match.uipath(relroot)
1073 1073 relroot += '/'
1074 1074 for matchroot in match.files():
1075 1075 if not matchroot.startswith(relroot):
1076 1076 ui.warn(_('warning: %s not inside relative root %s\n') % (
1077 1077 match.uipath(matchroot), uirelroot))
1078 1078
1079 1079 if stat:
1080 1080 diffopts = diffopts.copy(context=0)
1081 1081 width = 80
1082 1082 if not ui.plain():
1083 1083 width = ui.termwidth()
1084 1084 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1085 1085 prefix=prefix, relroot=relroot)
1086 1086 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1087 1087 width=width,
1088 1088 git=diffopts.git):
1089 1089 write(chunk, label=label)
1090 1090 else:
1091 1091 for chunk, label in patch.diffui(repo, node1, node2, match,
1092 1092 changes, diffopts, prefix=prefix,
1093 1093 relroot=relroot):
1094 1094 write(chunk, label=label)
1095 1095
1096 1096 if listsubrepos:
1097 1097 ctx1 = repo[node1]
1098 1098 ctx2 = repo[node2]
1099 1099 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1100 1100 tempnode2 = node2
1101 1101 try:
1102 1102 if node2 is not None:
1103 1103 tempnode2 = ctx2.substate[subpath][1]
1104 1104 except KeyError:
1105 1105 # A subrepo that existed in node1 was deleted between node1 and
1106 1106 # node2 (inclusive). Thus, ctx2's substate won't contain that
1107 1107 # subpath. The best we can do is to ignore it.
1108 1108 tempnode2 = None
1109 1109 submatch = matchmod.narrowmatcher(subpath, match)
1110 1110 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1111 1111 stat=stat, fp=fp, prefix=prefix)
1112 1112
1113 1113 class changeset_printer(object):
1114 1114     '''show changeset information when templating is not requested.'''
1115 1115
1116 1116 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1117 1117 self.ui = ui
1118 1118 self.repo = repo
1119 1119 self.buffered = buffered
1120 1120 self.matchfn = matchfn
1121 1121 self.diffopts = diffopts
1122 1122 self.header = {}
1123 1123 self.hunk = {}
1124 1124 self.lastheader = None
1125 1125 self.footer = None
1126 1126
1127 1127 def flush(self, ctx):
1128 1128 rev = ctx.rev()
1129 1129 if rev in self.header:
1130 1130 h = self.header[rev]
1131 1131 if h != self.lastheader:
1132 1132 self.lastheader = h
1133 1133 self.ui.write(h)
1134 1134 del self.header[rev]
1135 1135 if rev in self.hunk:
1136 1136 self.ui.write(self.hunk[rev])
1137 1137 del self.hunk[rev]
1138 1138 return 1
1139 1139 return 0
1140 1140
1141 1141 def close(self):
1142 1142 if self.footer:
1143 1143 self.ui.write(self.footer)
1144 1144
1145 1145 def show(self, ctx, copies=None, matchfn=None, **props):
1146 1146 if self.buffered:
1147 1147 self.ui.pushbuffer()
1148 1148 self._show(ctx, copies, matchfn, props)
1149 1149 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
1150 1150 else:
1151 1151 self._show(ctx, copies, matchfn, props)
1152 1152
1153 1153 def _show(self, ctx, copies, matchfn, props):
1154 1154 '''show a single changeset or file revision'''
1155 1155 changenode = ctx.node()
1156 1156 rev = ctx.rev()
1157 1157 if self.ui.debugflag:
1158 1158 hexfunc = hex
1159 1159 else:
1160 1160 hexfunc = short
1161 1161 # as of now, wctx.node() and wctx.rev() return None, but we want to
1162 1162 # show the same values as {node} and {rev} templatekw
1163 1163 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1164 1164
1165 1165 if self.ui.quiet:
1166 1166 self.ui.write("%d:%s\n" % revnode, label='log.node')
1167 1167 return
1168 1168
1169 1169 date = util.datestr(ctx.date())
1170 1170
1171 1171 # i18n: column positioning for "hg log"
1172 1172 self.ui.write(_("changeset: %d:%s\n") % revnode,
1173 1173 label='log.changeset changeset.%s' % ctx.phasestr())
1174 1174
1175 1175 # branches are shown first before any other names due to backwards
1176 1176 # compatibility
1177 1177 branch = ctx.branch()
1178 1178 # don't show the default branch name
1179 1179 if branch != 'default':
1180 1180 # i18n: column positioning for "hg log"
1181 1181 self.ui.write(_("branch: %s\n") % branch,
1182 1182 label='log.branch')
1183 1183
1184 1184 for name, ns in self.repo.names.iteritems():
1185 1185 # branches has special logic already handled above, so here we just
1186 1186 # skip it
1187 1187 if name == 'branches':
1188 1188 continue
1189 1189 # we will use the templatename as the color name since those two
1190 1190 # should be the same
1191 1191 for name in ns.names(self.repo, changenode):
1192 1192 self.ui.write(ns.logfmt % name,
1193 1193 label='log.%s' % ns.colorname)
1194 1194 if self.ui.debugflag:
1195 1195 # i18n: column positioning for "hg log"
1196 1196 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1197 1197 label='log.phase')
1198 1198 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1199 1199 label = 'log.parent changeset.%s' % pctx.phasestr()
1200 1200 # i18n: column positioning for "hg log"
1201 1201 self.ui.write(_("parent: %d:%s\n")
1202 1202 % (pctx.rev(), hexfunc(pctx.node())),
1203 1203 label=label)
1204 1204
1205 1205 if self.ui.debugflag and rev is not None:
1206 1206 mnode = ctx.manifestnode()
1207 1207 # i18n: column positioning for "hg log"
1208 1208 self.ui.write(_("manifest: %d:%s\n") %
1209 1209 (self.repo.manifest.rev(mnode), hex(mnode)),
1210 1210 label='ui.debug log.manifest')
1211 1211 # i18n: column positioning for "hg log"
1212 1212 self.ui.write(_("user: %s\n") % ctx.user(),
1213 1213 label='log.user')
1214 1214 # i18n: column positioning for "hg log"
1215 1215 self.ui.write(_("date: %s\n") % date,
1216 1216 label='log.date')
1217 1217
1218 1218 if self.ui.debugflag:
1219 1219 files = ctx.p1().status(ctx)[:3]
1220 1220 for key, value in zip([# i18n: column positioning for "hg log"
1221 1221 _("files:"),
1222 1222 # i18n: column positioning for "hg log"
1223 1223 _("files+:"),
1224 1224 # i18n: column positioning for "hg log"
1225 1225 _("files-:")], files):
1226 1226 if value:
1227 1227 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1228 1228 label='ui.debug log.files')
1229 1229 elif ctx.files() and self.ui.verbose:
1230 1230 # i18n: column positioning for "hg log"
1231 1231 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1232 1232 label='ui.note log.files')
1233 1233 if copies and self.ui.verbose:
1234 1234 copies = ['%s (%s)' % c for c in copies]
1235 1235 # i18n: column positioning for "hg log"
1236 1236 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1237 1237 label='ui.note log.copies')
1238 1238
1239 1239 extra = ctx.extra()
1240 1240 if extra and self.ui.debugflag:
1241 1241 for key, value in sorted(extra.items()):
1242 1242 # i18n: column positioning for "hg log"
1243 1243 self.ui.write(_("extra: %s=%s\n")
1244 1244 % (key, value.encode('string_escape')),
1245 1245 label='ui.debug log.extra')
1246 1246
1247 1247 description = ctx.description().strip()
1248 1248 if description:
1249 1249 if self.ui.verbose:
1250 1250 self.ui.write(_("description:\n"),
1251 1251 label='ui.note log.description')
1252 1252 self.ui.write(description,
1253 1253 label='ui.note log.description')
1254 1254 self.ui.write("\n\n")
1255 1255 else:
1256 1256 # i18n: column positioning for "hg log"
1257 1257 self.ui.write(_("summary: %s\n") %
1258 1258 description.splitlines()[0],
1259 1259 label='log.summary')
1260 1260 self.ui.write("\n")
1261 1261
1262 1262 self.showpatch(changenode, matchfn)
1263 1263
1264 1264 def showpatch(self, node, matchfn):
1265 1265 if not matchfn:
1266 1266 matchfn = self.matchfn
1267 1267 if matchfn:
1268 1268 stat = self.diffopts.get('stat')
1269 1269 diff = self.diffopts.get('patch')
1270 1270 diffopts = patch.diffallopts(self.ui, self.diffopts)
1271 1271 prev = self.repo.changelog.parents(node)[0]
1272 1272 if stat:
1273 1273 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1274 1274 match=matchfn, stat=True)
1275 1275 if diff:
1276 1276 if stat:
1277 1277 self.ui.write("\n")
1278 1278 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1279 1279 match=matchfn, stat=False)
1280 1280 self.ui.write("\n")
1281 1281
1282 1282 class jsonchangeset(changeset_printer):
1283 1283 '''format changeset information.'''
1284 1284
1285 1285 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1286 1286 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1287 1287 self.cache = {}
1288 1288 self._first = True
1289 1289
1290 1290 def close(self):
1291 1291 if not self._first:
1292 1292 self.ui.write("\n]\n")
1293 1293 else:
1294 1294 self.ui.write("[]\n")
1295 1295
1296 1296 def _show(self, ctx, copies, matchfn, props):
1297 1297 '''show a single changeset or file revision'''
1298 1298 rev = ctx.rev()
1299 1299 if rev is None:
1300 1300 jrev = jnode = 'null'
1301 1301 else:
1302 1302 jrev = str(rev)
1303 1303 jnode = '"%s"' % hex(ctx.node())
1304 1304 j = encoding.jsonescape
1305 1305
1306 1306 if self._first:
1307 1307 self.ui.write("[\n {")
1308 1308 self._first = False
1309 1309 else:
1310 1310 self.ui.write(",\n {")
1311 1311
1312 1312 if self.ui.quiet:
1313 1313 self.ui.write('\n "rev": %s' % jrev)
1314 1314 self.ui.write(',\n "node": %s' % jnode)
1315 1315 self.ui.write('\n }')
1316 1316 return
1317 1317
1318 1318 self.ui.write('\n "rev": %s' % jrev)
1319 1319 self.ui.write(',\n "node": %s' % jnode)
1320 1320 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1321 1321 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1322 1322 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1323 1323 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1324 1324 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1325 1325
1326 1326 self.ui.write(',\n "bookmarks": [%s]' %
1327 1327 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1328 1328 self.ui.write(',\n "tags": [%s]' %
1329 1329 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1330 1330 self.ui.write(',\n "parents": [%s]' %
1331 1331 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1332 1332
1333 1333 if self.ui.debugflag:
1334 1334 if rev is None:
1335 1335 jmanifestnode = 'null'
1336 1336 else:
1337 1337 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1338 1338 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1339 1339
1340 1340 self.ui.write(',\n "extra": {%s}' %
1341 1341 ", ".join('"%s": "%s"' % (j(k), j(v))
1342 1342 for k, v in ctx.extra().items()))
1343 1343
1344 1344 files = ctx.p1().status(ctx)
1345 1345 self.ui.write(',\n "modified": [%s]' %
1346 1346 ", ".join('"%s"' % j(f) for f in files[0]))
1347 1347 self.ui.write(',\n "added": [%s]' %
1348 1348 ", ".join('"%s"' % j(f) for f in files[1]))
1349 1349 self.ui.write(',\n "removed": [%s]' %
1350 1350 ", ".join('"%s"' % j(f) for f in files[2]))
1351 1351
1352 1352 elif self.ui.verbose:
1353 1353 self.ui.write(',\n "files": [%s]' %
1354 1354 ", ".join('"%s"' % j(f) for f in ctx.files()))
1355 1355
1356 1356 if copies:
1357 1357 self.ui.write(',\n "copies": {%s}' %
1358 1358 ", ".join('"%s": "%s"' % (j(k), j(v))
1359 1359 for k, v in copies))
1360 1360
1361 1361 matchfn = self.matchfn
1362 1362 if matchfn:
1363 1363 stat = self.diffopts.get('stat')
1364 1364 diff = self.diffopts.get('patch')
1365 1365 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1366 1366 node, prev = ctx.node(), ctx.p1().node()
1367 1367 if stat:
1368 1368 self.ui.pushbuffer()
1369 1369 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1370 1370 match=matchfn, stat=True)
1371 1371 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1372 1372 if diff:
1373 1373 self.ui.pushbuffer()
1374 1374 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1375 1375 match=matchfn, stat=False)
1376 1376 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1377 1377
1378 1378 self.ui.write("\n }")
1379 1379
1380 1380 class changeset_templater(changeset_printer):
1381 1381 '''format changeset information.'''
1382 1382
1383 1383 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1384 1384 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1385 1385 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1386 1386 defaulttempl = {
1387 1387 'parent': '{rev}:{node|formatnode} ',
1388 1388 'manifest': '{rev}:{node|formatnode}',
1389 1389 'file_copy': '{name} ({source})',
1390 1390 'extra': '{key}={value|stringescape}'
1391 1391 }
1392 1392 # filecopy is preserved for compatibility reasons
1393 1393 defaulttempl['filecopy'] = defaulttempl['file_copy']
1394 1394 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1395 1395 cache=defaulttempl)
1396 1396 if tmpl:
1397 1397 self.t.cache['changeset'] = tmpl
1398 1398
1399 1399 self.cache = {}
1400 1400
1401 1401 # find correct templates for current mode
1402 1402 tmplmodes = [
1403 1403 (True, None),
1404 1404 (self.ui.verbose, 'verbose'),
1405 1405 (self.ui.quiet, 'quiet'),
1406 1406 (self.ui.debugflag, 'debug'),
1407 1407 ]
1408 1408
1409 1409 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1410 1410 'docheader': '', 'docfooter': ''}
1411 1411 for mode, postfix in tmplmodes:
1412 1412 for t in self._parts:
1413 1413 cur = t
1414 1414 if postfix:
1415 1415 cur += "_" + postfix
1416 1416 if mode and cur in self.t:
1417 1417 self._parts[t] = cur
1418 1418
1419 1419 if self._parts['docheader']:
1420 1420 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1421 1421
1422 1422 def close(self):
1423 1423 if self._parts['docfooter']:
1424 1424 if not self.footer:
1425 1425 self.footer = ""
1426 1426 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1427 1427 return super(changeset_templater, self).close()
1428 1428
1429 1429 def _show(self, ctx, copies, matchfn, props):
1430 1430 '''show a single changeset or file revision'''
1431
1432 showlist = templatekw.showlist
1433
1434 # showparents() behavior depends on ui trace level which
1435 # causes unexpected behaviors at templating level and makes
1436 # it harder to extract it in a standalone function. Its
1437 # behavior cannot be changed so leave it here for now.
1438 def showparents(**args):
1439 ctx = args['ctx']
1440 parents = [[('rev', p.rev()),
1441 ('node', p.hex()),
1442 ('phase', p.phasestr())]
1443 for p in scmutil.meaningfulparents(self.repo, ctx)]
1444 return showlist('parent', parents, **args)
1445
1446 1431 props = props.copy()
1447 1432 props.update(templatekw.keywords)
1448 props['parents'] = showparents
1449 1433 props['templ'] = self.t
1450 1434 props['ctx'] = ctx
1451 1435 props['repo'] = self.repo
1452 1436 props['revcache'] = {'copies': copies}
1453 1437 props['cache'] = self.cache
1454 1438
1455 1439 try:
1456 1440 # write header
1457 1441 if self._parts['header']:
1458 1442 h = templater.stringify(self.t(self._parts['header'], **props))
1459 1443 if self.buffered:
1460 1444 self.header[ctx.rev()] = h
1461 1445 else:
1462 1446 if self.lastheader != h:
1463 1447 self.lastheader = h
1464 1448 self.ui.write(h)
1465 1449
1466 1450 # write changeset metadata, then patch if requested
1467 1451 key = self._parts['changeset']
1468 1452 self.ui.write(templater.stringify(self.t(key, **props)))
1469 1453 self.showpatch(ctx.node(), matchfn)
1470 1454
1471 1455 if self._parts['footer']:
1472 1456 if not self.footer:
1473 1457 self.footer = templater.stringify(
1474 1458 self.t(self._parts['footer'], **props))
1475 1459 except KeyError as inst:
1476 1460 msg = _("%s: no key named '%s'")
1477 1461 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1478 1462 except SyntaxError as inst:
1479 1463 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1480 1464
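# Editor's note: illustrative sketch, not part of this changeset. The hunk above
# only shows the removal side of the port; on the templatekw side the keyword
# can be expected to look roughly like the removed closure, minus the reliance
# on self.repo (the exact function name, docstring and registration in
# templatekw.keywords are assumptions, not taken from this diff):
#
#     def showparents(**args):
#         ctx = args['ctx']
#         parents = [[('rev', p.rev()),
#                     ('node', p.hex()),
#                     ('phase', p.phasestr())]
#                    for p in scmutil.meaningfulparents(args['repo'], ctx)]
#         return showlist('parent', parents, **args)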
1481 1465 def gettemplate(ui, tmpl, style):
1482 1466 """
1483 1467 Find the template matching the given template spec or style.
1484 1468 """
1485 1469
1486 1470 # ui settings
1487 1471     if not tmpl and not style: # templates are stronger than style
1488 1472 tmpl = ui.config('ui', 'logtemplate')
1489 1473 if tmpl:
1490 1474 try:
1491 1475 tmpl = templater.unquotestring(tmpl)
1492 1476 except SyntaxError:
1493 1477 pass
1494 1478 return tmpl, None
1495 1479 else:
1496 1480 style = util.expandpath(ui.config('ui', 'style', ''))
1497 1481
1498 1482 if not tmpl and style:
1499 1483 mapfile = style
1500 1484 if not os.path.split(mapfile)[0]:
1501 1485 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1502 1486 or templater.templatepath(mapfile))
1503 1487 if mapname:
1504 1488 mapfile = mapname
1505 1489 return None, mapfile
1506 1490
1507 1491 if not tmpl:
1508 1492 return None, None
1509 1493
1510 1494 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1511 1495
1512 1496 def show_changeset(ui, repo, opts, buffered=False):
1513 1497 """show one changeset using template or regular display.
1514 1498
1515 1499 Display format will be the first non-empty hit of:
1516 1500 1. option 'template'
1517 1501 2. option 'style'
1518 1502 3. [ui] setting 'logtemplate'
1519 1503 4. [ui] setting 'style'
1520 1504     If all of these values are either unset or the empty string,
1521 1505 regular display via changeset_printer() is done.
1522 1506 """
1523 1507 # options
1524 1508 matchfn = None
1525 1509 if opts.get('patch') or opts.get('stat'):
1526 1510 matchfn = scmutil.matchall(repo)
1527 1511
1528 1512 if opts.get('template') == 'json':
1529 1513 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1530 1514
1531 1515 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1532 1516
1533 1517 if not tmpl and not mapfile:
1534 1518 return changeset_printer(ui, repo, matchfn, opts, buffered)
1535 1519
1536 1520 try:
1537 1521 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1538 1522 buffered)
1539 1523 except SyntaxError as inst:
1540 1524 raise util.Abort(inst.args[0])
1541 1525 return t
1542 1526
1543 1527 def showmarker(ui, marker):
1544 1528     """utility function to display an obsolescence marker in a readable way
1545 1529
1546 1530     To be used by debug functions."""
1547 1531 ui.write(hex(marker.precnode()))
1548 1532 for repl in marker.succnodes():
1549 1533 ui.write(' ')
1550 1534 ui.write(hex(repl))
1551 1535 ui.write(' %X ' % marker.flags())
1552 1536 parents = marker.parentnodes()
1553 1537 if parents is not None:
1554 1538 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1555 1539 ui.write('(%s) ' % util.datestr(marker.date()))
1556 1540 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1557 1541 sorted(marker.metadata().items())
1558 1542 if t[0] != 'date')))
1559 1543 ui.write('\n')
1560 1544
1561 1545 def finddate(ui, repo, date):
1562 1546 """Find the tipmost changeset that matches the given date spec"""
1563 1547
1564 1548 df = util.matchdate(date)
1565 1549 m = scmutil.matchall(repo)
1566 1550 results = {}
1567 1551
1568 1552 def prep(ctx, fns):
1569 1553 d = ctx.date()
1570 1554 if df(d[0]):
1571 1555 results[ctx.rev()] = d
1572 1556
1573 1557 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1574 1558 rev = ctx.rev()
1575 1559 if rev in results:
1576 1560 ui.status(_("found revision %s from %s\n") %
1577 1561 (rev, util.datestr(results[rev])))
1578 1562 return str(rev)
1579 1563
1580 1564 raise util.Abort(_("revision matching date not found"))
1581 1565
1582 1566 def increasingwindows(windowsize=8, sizelimit=512):
1583 1567 while True:
1584 1568 yield windowsize
1585 1569 if windowsize < sizelimit:
1586 1570 windowsize *= 2
1587 1571
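# Editor's note: illustrative check, not part of this changeset; it shows the
# window sizes yielded by increasingwindows() above, which double until they
# reach the size limit and then stay there:
import itertools
assert list(itertools.islice(increasingwindows(), 9)) == \
    [8, 16, 32, 64, 128, 256, 512, 512, 512]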
1588 1572 class FileWalkError(Exception):
1589 1573 pass
1590 1574
1591 1575 def walkfilerevs(repo, match, follow, revs, fncache):
1592 1576 '''Walks the file history for the matched files.
1593 1577
1594 1578 Returns the changeset revs that are involved in the file history.
1595 1579
1596 1580 Throws FileWalkError if the file history can't be walked using
1597 1581 filelogs alone.
1598 1582 '''
1599 1583 wanted = set()
1600 1584 copies = []
1601 1585 minrev, maxrev = min(revs), max(revs)
1602 1586 def filerevgen(filelog, last):
1603 1587 """
1604 1588 Only files, no patterns. Check the history of each file.
1605 1589
1606 1590         Examines filelog entries within the minrev/maxrev linkrev range.
1607 1591 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1608 1592 tuples in backwards order
1609 1593 """
1610 1594 cl_count = len(repo)
1611 1595 revs = []
1612 1596 for j in xrange(0, last + 1):
1613 1597 linkrev = filelog.linkrev(j)
1614 1598 if linkrev < minrev:
1615 1599 continue
1616 1600 # only yield revs for which we have the changelog; this can
1617 1601 # happen while doing "hg log" during a pull or commit
1618 1602 if linkrev >= cl_count:
1619 1603 break
1620 1604
1621 1605 parentlinkrevs = []
1622 1606 for p in filelog.parentrevs(j):
1623 1607 if p != nullrev:
1624 1608 parentlinkrevs.append(filelog.linkrev(p))
1625 1609 n = filelog.node(j)
1626 1610 revs.append((linkrev, parentlinkrevs,
1627 1611 follow and filelog.renamed(n)))
1628 1612
1629 1613 return reversed(revs)
1630 1614 def iterfiles():
1631 1615 pctx = repo['.']
1632 1616 for filename in match.files():
1633 1617 if follow:
1634 1618 if filename not in pctx:
1635 1619 raise util.Abort(_('cannot follow file not in parent '
1636 1620 'revision: "%s"') % filename)
1637 1621 yield filename, pctx[filename].filenode()
1638 1622 else:
1639 1623 yield filename, None
1640 1624 for filename_node in copies:
1641 1625 yield filename_node
1642 1626
1643 1627 for file_, node in iterfiles():
1644 1628 filelog = repo.file(file_)
1645 1629 if not len(filelog):
1646 1630 if node is None:
1647 1631 # A zero count may be a directory or deleted file, so
1648 1632 # try to find matching entries on the slow path.
1649 1633 if follow:
1650 1634 raise util.Abort(
1651 1635 _('cannot follow nonexistent file: "%s"') % file_)
1652 1636 raise FileWalkError("Cannot walk via filelog")
1653 1637 else:
1654 1638 continue
1655 1639
1656 1640 if node is None:
1657 1641 last = len(filelog) - 1
1658 1642 else:
1659 1643 last = filelog.rev(node)
1660 1644
1661 1645 # keep track of all ancestors of the file
1662 1646 ancestors = set([filelog.linkrev(last)])
1663 1647
1664 1648 # iterate from latest to oldest revision
1665 1649 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1666 1650 if not follow:
1667 1651 if rev > maxrev:
1668 1652 continue
1669 1653 else:
1670 1654 # Note that last might not be the first interesting
1671 1655 # rev to us:
1672 1656 # if the file has been changed after maxrev, we'll
1673 1657 # have linkrev(last) > maxrev, and we still need
1674 1658 # to explore the file graph
1675 1659 if rev not in ancestors:
1676 1660 continue
1677 1661 # XXX insert 1327 fix here
1678 1662 if flparentlinkrevs:
1679 1663 ancestors.update(flparentlinkrevs)
1680 1664
1681 1665 fncache.setdefault(rev, []).append(file_)
1682 1666 wanted.add(rev)
1683 1667 if copied:
1684 1668 copies.append(copied)
1685 1669
1686 1670 return wanted
1687 1671
1688 1672 class _followfilter(object):
1689 1673 def __init__(self, repo, onlyfirst=False):
1690 1674 self.repo = repo
1691 1675 self.startrev = nullrev
1692 1676 self.roots = set()
1693 1677 self.onlyfirst = onlyfirst
1694 1678
1695 1679 def match(self, rev):
1696 1680 def realparents(rev):
1697 1681 if self.onlyfirst:
1698 1682 return self.repo.changelog.parentrevs(rev)[0:1]
1699 1683 else:
1700 1684 return filter(lambda x: x != nullrev,
1701 1685 self.repo.changelog.parentrevs(rev))
1702 1686
1703 1687 if self.startrev == nullrev:
1704 1688 self.startrev = rev
1705 1689 return True
1706 1690
1707 1691 if rev > self.startrev:
1708 1692 # forward: all descendants
1709 1693 if not self.roots:
1710 1694 self.roots.add(self.startrev)
1711 1695 for parent in realparents(rev):
1712 1696 if parent in self.roots:
1713 1697 self.roots.add(rev)
1714 1698 return True
1715 1699 else:
1716 1700 # backwards: all parents
1717 1701 if not self.roots:
1718 1702 self.roots.update(realparents(self.startrev))
1719 1703 if rev in self.roots:
1720 1704 self.roots.remove(rev)
1721 1705 self.roots.update(realparents(rev))
1722 1706 return True
1723 1707
1724 1708 return False
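# _followfilter is used by walkchangerevs() below both to honour --prune
# (dropping each pruned revision and its ancestors) and, when --follow is
# given without file patterns, to restrict the walk to the ancestors or
# descendants of the starting revision.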
1725 1709
1726 1710 def walkchangerevs(repo, match, opts, prepare):
1727 1711 '''Iterate over files and the revs in which they changed.
1728 1712
1729 1713 Callers most commonly need to iterate backwards over the history
1730 1714 in which they are interested. Doing so has awful (quadratic-looking)
1731 1715 performance, so we use iterators in a "windowed" way.
1732 1716
1733 1717 We walk a window of revisions in the desired order. Within the
1734 1718 window, we first walk forwards to gather data, then in the desired
1735 1719 order (usually backwards) to display it.
1736 1720
1737 1721 This function returns an iterator yielding contexts. Before
1738 1722 yielding each context, the iterator will first call the prepare
1739 1723 function on each context in the window in forward order.'''
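# A sketch of typical use, given a matcher (e.g. scmutil.matchall(repo)); the
# hypothetical 'prepare' callback just records which files each changeset
# touched for a later display pass:
#
#   fnsbyrev = {}
#   def prepare(ctx, fns):
#       fnsbyrev[ctx.rev()] = list(fns)
#   for ctx in walkchangerevs(repo, scmutil.matchall(repo), {'rev': None},
#                             prepare):
#       ui.write('%d\n' % ctx.rev())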
1740 1724
1741 1725 follow = opts.get('follow') or opts.get('follow_first')
1742 1726 revs = _logrevs(repo, opts)
1743 1727 if not revs:
1744 1728 return []
1745 1729 wanted = set()
1746 1730 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1747 1731 opts.get('removed'))
1748 1732 fncache = {}
1749 1733 change = repo.changectx
1750 1734
1751 1735 # First step is to fill wanted, the set of revisions that we want to yield.
1752 1736 # When it does not induce extra cost, we also fill fncache for revisions in
1753 1737 # wanted: a cache of filenames that were changed (ctx.files()) and that
1754 1738 # match the file filtering conditions.
1755 1739
1756 1740 if match.always():
1757 1741 # No files, no patterns. Display all revs.
1758 1742 wanted = revs
1759 1743 elif not slowpath:
1760 1744 # We only have to read through the filelog to find wanted revisions
1761 1745
1762 1746 try:
1763 1747 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1764 1748 except FileWalkError:
1765 1749 slowpath = True
1766 1750
1767 1751 # We decided to fall back to the slowpath because at least one
1768 1752 # of the paths was not a file. Check to see if at least one of them
1769 1753 # existed in history, otherwise simply return
1770 1754 for path in match.files():
1771 1755 if path == '.' or path in repo.store:
1772 1756 break
1773 1757 else:
1774 1758 return []
1775 1759
1776 1760 if slowpath:
1777 1761 # We have to read the changelog to match filenames against
1778 1762 # changed files
1779 1763
1780 1764 if follow:
1781 1765 raise util.Abort(_('can only follow copies/renames for explicit '
1782 1766 'filenames'))
1783 1767
1784 1768 # The slow path checks files modified in every changeset.
1785 1769 # This is really slow on large repos, so compute the set lazily.
1786 1770 class lazywantedset(object):
1787 1771 def __init__(self):
1788 1772 self.set = set()
1789 1773 self.revs = set(revs)
1790 1774
1791 1775 # No need to worry about locality here because it will be accessed
1792 1776 # in the same order as the increasing window below.
1793 1777 def __contains__(self, value):
1794 1778 if value in self.set:
1795 1779 return True
1796 1780 elif value not in self.revs:
1797 1781 return False
1798 1782 else:
1799 1783 self.revs.discard(value)
1800 1784 ctx = change(value)
1801 1785 matches = filter(match, ctx.files())
1802 1786 if matches:
1803 1787 fncache[value] = matches
1804 1788 self.set.add(value)
1805 1789 return True
1806 1790 return False
1807 1791
1808 1792 def discard(self, value):
1809 1793 self.revs.discard(value)
1810 1794 self.set.discard(value)
1811 1795
1812 1796 wanted = lazywantedset()
1813 1797
1814 1798 # it might be worthwhile to do this in the iterator if the rev range
1815 1799 # is descending and the prune args are all within that range
1816 1800 for rev in opts.get('prune', ()):
1817 1801 rev = repo[rev].rev()
1818 1802 ff = _followfilter(repo)
1819 1803 stop = min(revs[0], revs[-1])
1820 1804 for x in xrange(rev, stop - 1, -1):
1821 1805 if ff.match(x):
1822 1806 wanted = wanted - [x]
1823 1807
1824 1808 # Now that wanted is correctly initialized, we can iterate over the
1825 1809 # revision range, yielding only revisions in wanted.
1826 1810 def iterate():
1827 1811 if follow and match.always():
1828 1812 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1829 1813 def want(rev):
1830 1814 return ff.match(rev) and rev in wanted
1831 1815 else:
1832 1816 def want(rev):
1833 1817 return rev in wanted
1834 1818
1835 1819 it = iter(revs)
1836 1820 stopiteration = False
1837 1821 for windowsize in increasingwindows():
1838 1822 nrevs = []
1839 1823 for i in xrange(windowsize):
1840 1824 rev = next(it, None)
1841 1825 if rev is None:
1842 1826 stopiteration = True
1843 1827 break
1844 1828 elif want(rev):
1845 1829 nrevs.append(rev)
1846 1830 for rev in sorted(nrevs):
1847 1831 fns = fncache.get(rev)
1848 1832 ctx = change(rev)
1849 1833 if not fns:
1850 1834 def fns_generator():
1851 1835 for f in ctx.files():
1852 1836 if match(f):
1853 1837 yield f
1854 1838 fns = fns_generator()
1855 1839 prepare(ctx, fns)
1856 1840 for rev in nrevs:
1857 1841 yield change(rev)
1858 1842
1859 1843 if stopiteration:
1860 1844 break
1861 1845
1862 1846 return iterate()
1863 1847
1864 1848 def _makefollowlogfilematcher(repo, files, followfirst):
1865 1849 # When displaying a revision with --patch --follow FILE, we have
1866 1850 # to know which file of the revision must be diffed. With
1867 1851 # --follow, we want the names of the ancestors of FILE in the
1868 1852 # revision, stored in "fcache". "fcache" is populated by
1869 1853 # reproducing the graph traversal already done by --follow revset
1870 1854 # and relating linkrevs to file names (which is not "correct" but
1871 1855 # good enough).
1872 1856 fcache = {}
1873 1857 fcacheready = [False]
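# fcacheready is a one-element list so the nested filematcher() below can
# flip the flag in place; rebinding a plain local would not be visible here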
1874 1858 pctx = repo['.']
1875 1859
1876 1860 def populate():
1877 1861 for fn in files:
1878 1862 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1879 1863 for c in i:
1880 1864 fcache.setdefault(c.linkrev(), set()).add(c.path())
1881 1865
1882 1866 def filematcher(rev):
1883 1867 if not fcacheready[0]:
1884 1868 # Lazy initialization
1885 1869 fcacheready[0] = True
1886 1870 populate()
1887 1871 return scmutil.matchfiles(repo, fcache.get(rev, []))
1888 1872
1889 1873 return filematcher
1890 1874
1891 1875 def _makenofollowlogfilematcher(repo, pats, opts):
1892 1876 '''hook for extensions to override the filematcher for non-follow cases'''
1893 1877 return None
1894 1878
1895 1879 def _makelogrevset(repo, pats, opts, revs):
1896 1880 """Return (expr, filematcher) where expr is a revset string built
1897 1881 from log options and file patterns or None. If --stat or --patch
1898 1882 are not passed filematcher is None. Otherwise it is a callable
1899 1883 taking a revision number and returning a match object filtering
1900 1884 the files to be detailed when displaying the revision.
1901 1885 """
1902 1886 opt2revset = {
1903 1887 'no_merges': ('not merge()', None),
1904 1888 'only_merges': ('merge()', None),
1905 1889 '_ancestors': ('ancestors(%(val)s)', None),
1906 1890 '_fancestors': ('_firstancestors(%(val)s)', None),
1907 1891 '_descendants': ('descendants(%(val)s)', None),
1908 1892 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1909 1893 '_matchfiles': ('_matchfiles(%(val)s)', None),
1910 1894 'date': ('date(%(val)r)', None),
1911 1895 'branch': ('branch(%(val)r)', ' or '),
1912 1896 '_patslog': ('filelog(%(val)r)', ' or '),
1913 1897 '_patsfollow': ('follow(%(val)r)', ' or '),
1914 1898 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1915 1899 'keyword': ('keyword(%(val)r)', ' or '),
1916 1900 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1917 1901 'user': ('user(%(val)r)', ' or '),
1918 1902 }
1919 1903
1920 1904 opts = dict(opts)
1921 1905 # follow or not follow?
1922 1906 follow = opts.get('follow') or opts.get('follow_first')
1923 1907 if opts.get('follow_first'):
1924 1908 followfirst = 1
1925 1909 else:
1926 1910 followfirst = 0
1927 1911 # --follow with FILE behavior depends on revs...
1928 1912 it = iter(revs)
1929 1913 startrev = it.next()
1930 1914 followdescendants = startrev < next(it, startrev)
1931 1915
1932 1916 # branch and only_branch are really aliases and must be handled at
1933 1917 # the same time
1934 1918 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1935 1919 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1936 1920 # pats/include/exclude are passed to match.match() directly in
1937 1921 # _matchfiles() revset but walkchangerevs() builds its matcher with
1938 1922 # scmutil.match(). The difference is input pats are globbed on
1939 1923 # platforms without shell expansion (windows).
1940 1924 wctx = repo[None]
1941 1925 match, pats = scmutil.matchandpats(wctx, pats, opts)
1942 1926 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1943 1927 opts.get('removed'))
1944 1928 if not slowpath:
1945 1929 for f in match.files():
1946 1930 if follow and f not in wctx:
1947 1931 # If the file exists, it may be a directory, so let it
1948 1932 # take the slow path.
1949 1933 if os.path.exists(repo.wjoin(f)):
1950 1934 slowpath = True
1951 1935 continue
1952 1936 else:
1953 1937 raise util.Abort(_('cannot follow file not in parent '
1954 1938 'revision: "%s"') % f)
1955 1939 filelog = repo.file(f)
1956 1940 if not filelog:
1957 1941 # A zero count may be a directory or deleted file, so
1958 1942 # try to find matching entries on the slow path.
1959 1943 if follow:
1960 1944 raise util.Abort(
1961 1945 _('cannot follow nonexistent file: "%s"') % f)
1962 1946 slowpath = True
1963 1947
1964 1948 # We decided to fall back to the slowpath because at least one
1965 1949 # of the paths was not a file. Check to see if at least one of them
1966 1950 # existed in history - in that case, we'll continue down the
1967 1951 # slowpath; otherwise, we can turn off the slowpath
1968 1952 if slowpath:
1969 1953 for path in match.files():
1970 1954 if path == '.' or path in repo.store:
1971 1955 break
1972 1956 else:
1973 1957 slowpath = False
1974 1958
1975 1959 fpats = ('_patsfollow', '_patsfollowfirst')
1976 1960 fnopats = (('_ancestors', '_fancestors'),
1977 1961 ('_descendants', '_fdescendants'))
1978 1962 if slowpath:
1979 1963 # See walkchangerevs() slow path.
1980 1964 #
1981 1965 # pats/include/exclude cannot be represented as separate
1982 1966 # revset expressions as their filtering logic applies at file
1983 1967 # level. For instance "-I a -X a" matches a revision touching
1984 1968 # "a" and "b" while "file(a) and not file(b)" does
1985 1969 # not. Besides, filesets are evaluated against the working
1986 1970 # directory.
1987 1971 matchargs = ['r:', 'd:relpath']
1988 1972 for p in pats:
1989 1973 matchargs.append('p:' + p)
1990 1974 for p in opts.get('include', []):
1991 1975 matchargs.append('i:' + p)
1992 1976 for p in opts.get('exclude', []):
1993 1977 matchargs.append('x:' + p)
1994 1978 matchargs = ','.join(('%r' % p) for p in matchargs)
1995 1979 opts['_matchfiles'] = matchargs
1996 1980 if follow:
1997 1981 opts[fnopats[0][followfirst]] = '.'
1998 1982 else:
1999 1983 if follow:
2000 1984 if pats:
2001 1985 # follow() revset interprets its file argument as a
2002 1986 # manifest entry, so use match.files(), not pats.
2003 1987 opts[fpats[followfirst]] = list(match.files())
2004 1988 else:
2005 1989 op = fnopats[followdescendants][followfirst]
2006 1990 opts[op] = 'rev(%d)' % startrev
2007 1991 else:
2008 1992 opts['_patslog'] = list(pats)
2009 1993
2010 1994 filematcher = None
2011 1995 if opts.get('patch') or opts.get('stat'):
2012 1996 # When following files, track renames via a special matcher.
2013 1997 # If we're forced to take the slowpath it means we're following
2014 1998 # at least one pattern/directory, so don't bother with rename tracking.
2015 1999 if follow and not match.always() and not slowpath:
2016 2000 # _makefollowlogfilematcher expects its files argument to be
2017 2001 # relative to the repo root, so use match.files(), not pats.
2018 2002 filematcher = _makefollowlogfilematcher(repo, match.files(),
2019 2003 followfirst)
2020 2004 else:
2021 2005 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2022 2006 if filematcher is None:
2023 2007 filematcher = lambda rev: match
2024 2008
2025 2009 expr = []
2026 2010 for op, val in sorted(opts.iteritems()):
2027 2011 if not val:
2028 2012 continue
2029 2013 if op not in opt2revset:
2030 2014 continue
2031 2015 revop, andor = opt2revset[op]
2032 2016 if '%(val)' not in revop:
2033 2017 expr.append(revop)
2034 2018 else:
2035 2019 if not isinstance(val, list):
2036 2020 e = revop % {'val': val}
2037 2021 else:
2038 2022 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2039 2023 expr.append(e)
2040 2024
2041 2025 if expr:
2042 2026 expr = '(' + ' and '.join(expr) + ')'
2043 2027 else:
2044 2028 expr = None
2045 2029 return expr, filematcher
2046 2030
2047 2031 def _logrevs(repo, opts):
2048 2032 # Default --rev value depends on --follow but --follow behavior
2049 2033 # depends on revisions resolved from --rev...
2050 2034 follow = opts.get('follow') or opts.get('follow_first')
2051 2035 if opts.get('rev'):
2052 2036 revs = scmutil.revrange(repo, opts['rev'])
2053 2037 elif follow and repo.dirstate.p1() == nullid:
2054 2038 revs = revset.baseset()
2055 2039 elif follow:
2056 2040 revs = repo.revs('reverse(:.)')
2057 2041 else:
2058 2042 revs = revset.spanset(repo)
2059 2043 revs.reverse()
2060 2044 return revs
2061 2045
2062 2046 def getgraphlogrevs(repo, pats, opts):
2063 2047 """Return (revs, expr, filematcher) where revs is an iterable of
2064 2048 revision numbers, expr is a revset string built from log options
2065 2049 and file patterns or None, and used to filter 'revs'. If --stat or
2066 2050 --patch are not passed filematcher is None. Otherwise it is a
2067 2051 callable taking a revision number and returning a match object
2068 2052 filtering the files to be detailed when displaying the revision.
2069 2053 """
2070 2054 limit = loglimit(opts)
2071 2055 revs = _logrevs(repo, opts)
2072 2056 if not revs:
2073 2057 return revset.baseset(), None, None
2074 2058 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2075 2059 if opts.get('rev'):
2076 2060 # User-specified revs might be unsorted, but don't sort before
2077 2061 # _makelogrevset because it might depend on the order of revs
2078 2062 revs.sort(reverse=True)
2079 2063 if expr:
2080 2064 # Revset matchers often operate faster on revisions in changelog
2081 2065 # order, because most filters deal with the changelog.
2082 2066 revs.reverse()
2083 2067 matcher = revset.match(repo.ui, expr)
2084 2068 # Revset matches can reorder revisions. "A or B" typically returns
2085 2069 # the revision matching A then the revision matching B. Sort
2086 2070 # again to fix that.
2087 2071 revs = matcher(repo, revs)
2088 2072 revs.sort(reverse=True)
2089 2073 if limit is not None:
2090 2074 limitedrevs = []
2091 2075 for idx, rev in enumerate(revs):
2092 2076 if idx >= limit:
2093 2077 break
2094 2078 limitedrevs.append(rev)
2095 2079 revs = revset.baseset(limitedrevs)
2096 2080
2097 2081 return revs, expr, filematcher
2098 2082
2099 2083 def getlogrevs(repo, pats, opts):
2100 2084 """Return (revs, expr, filematcher) where revs is an iterable of
2101 2085 revision numbers, expr is a revset string built from log options
2102 2086 and file patterns or None, and used to filter 'revs'. If --stat or
2103 2087 --patch are not passed filematcher is None. Otherwise it is a
2104 2088 callable taking a revision number and returning a match object
2105 2089 filtering the files to be detailed when displaying the revision.
2106 2090 """
2107 2091 limit = loglimit(opts)
2108 2092 revs = _logrevs(repo, opts)
2109 2093 if not revs:
2110 2094 return revset.baseset([]), None, None
2111 2095 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2112 2096 if expr:
2113 2097 # Revset matchers often operate faster on revisions in changelog
2114 2098 # order, because most filters deal with the changelog.
2115 2099 if not opts.get('rev'):
2116 2100 revs.reverse()
2117 2101 matcher = revset.match(repo.ui, expr)
2118 2102 # Revset matches can reorder revisions. "A or B" typically returns
2119 2103 # the revision matching A then the revision matching B. Sort
2120 2104 # again to fix that.
2121 2105 revs = matcher(repo, revs)
2122 2106 if not opts.get('rev'):
2123 2107 revs.sort(reverse=True)
2124 2108 if limit is not None:
2125 2109 limitedrevs = []
2126 2110 for idx, r in enumerate(revs):
2127 2111 if limit <= idx:
2128 2112 break
2129 2113 limitedrevs.append(r)
2130 2114 revs = revset.baseset(limitedrevs)
2131 2115
2132 2116 return revs, expr, filematcher
2133 2117
2134 2118 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
2135 2119 filematcher=None):
2136 2120 seen, state = [], graphmod.asciistate()
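# graph node glyphs: 'o' ordinary changeset, '@' a changeset in showparents
# (the working directory parents in graphlog() below), 'x' an obsolete
# changeset, '_' a changeset closing its branch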
2137 2121 for rev, type, ctx, parents in dag:
2138 2122 char = 'o'
2139 2123 if ctx.node() in showparents:
2140 2124 char = '@'
2141 2125 elif ctx.obsolete():
2142 2126 char = 'x'
2143 2127 elif ctx.closesbranch():
2144 2128 char = '_'
2145 2129 copies = None
2146 2130 if getrenamed and ctx.rev():
2147 2131 copies = []
2148 2132 for fn in ctx.files():
2149 2133 rename = getrenamed(fn, ctx.rev())
2150 2134 if rename:
2151 2135 copies.append((fn, rename[0]))
2152 2136 revmatchfn = None
2153 2137 if filematcher is not None:
2154 2138 revmatchfn = filematcher(ctx.rev())
2155 2139 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2156 2140 lines = displayer.hunk.pop(rev).split('\n')
2157 2141 if not lines[-1]:
2158 2142 del lines[-1]
2159 2143 displayer.flush(ctx)
2160 2144 edges = edgefn(type, char, lines, seen, rev, parents)
2161 2145 for type, char, lines, coldata in edges:
2162 2146 graphmod.ascii(ui, state, type, char, lines, coldata)
2163 2147 displayer.close()
2164 2148
2165 2149 def graphlog(ui, repo, *pats, **opts):
2166 2150 # Parameters are identical to log command ones
2167 2151 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2168 2152 revdag = graphmod.dagwalker(repo, revs)
2169 2153
2170 2154 getrenamed = None
2171 2155 if opts.get('copies'):
2172 2156 endrev = None
2173 2157 if opts.get('rev'):
2174 2158 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2175 2159 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2176 2160 displayer = show_changeset(ui, repo, opts, buffered=True)
2177 2161 showparents = [ctx.node() for ctx in repo[None].parents()]
2178 2162 displaygraph(ui, revdag, displayer, showparents,
2179 2163 graphmod.asciiedges, getrenamed, filematcher)
2180 2164
2181 2165 def checkunsupportedgraphflags(pats, opts):
2182 2166 for op in ["newest_first"]:
2183 2167 if op in opts and opts[op]:
2184 2168 raise util.Abort(_("-G/--graph option is incompatible with --%s")
2185 2169 % op.replace("_", "-"))
2186 2170
2187 2171 def graphrevs(repo, nodes, opts):
2188 2172 limit = loglimit(opts)
2189 2173 nodes.reverse()
2190 2174 if limit is not None:
2191 2175 nodes = nodes[:limit]
2192 2176 return graphmod.nodes(repo, nodes)
2193 2177
2194 2178 def add(ui, repo, match, prefix, explicitonly, **opts):
2195 2179 join = lambda f: os.path.join(prefix, f)
2196 2180 bad = []
2197 2181
2198 2182 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2199 2183 names = []
2200 2184 wctx = repo[None]
2201 2185 cca = None
2202 2186 abort, warn = scmutil.checkportabilityalert(ui)
2203 2187 if abort or warn:
2204 2188 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2205 2189
2206 2190 badmatch = matchmod.badmatch(match, badfn)
2207 2191 dirstate = repo.dirstate
2208 2192 # We don't want to just call wctx.walk here, since it would return a lot of
2209 2193 # clean files, which we aren't interested in, and doing so takes time.
2210 2194 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2211 2195 True, False, full=False)):
2212 2196 exact = match.exact(f)
2213 2197 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2214 2198 if cca:
2215 2199 cca(f)
2216 2200 names.append(f)
2217 2201 if ui.verbose or not exact:
2218 2202 ui.status(_('adding %s\n') % match.rel(f))
2219 2203
2220 2204 for subpath in sorted(wctx.substate):
2221 2205 sub = wctx.sub(subpath)
2222 2206 try:
2223 2207 submatch = matchmod.narrowmatcher(subpath, match)
2224 2208 if opts.get('subrepos'):
2225 2209 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2226 2210 else:
2227 2211 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2228 2212 except error.LookupError:
2229 2213 ui.status(_("skipping missing subrepository: %s\n")
2230 2214 % join(subpath))
2231 2215
2232 2216 if not opts.get('dry_run'):
2233 2217 rejected = wctx.add(names, prefix)
2234 2218 bad.extend(f for f in rejected if f in match.files())
2235 2219 return bad
2236 2220
2237 2221 def forget(ui, repo, match, prefix, explicitonly):
2238 2222 join = lambda f: os.path.join(prefix, f)
2239 2223 bad = []
2240 2224 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2241 2225 wctx = repo[None]
2242 2226 forgot = []
2243 2227
2244 2228 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2245 2229 forget = sorted(s[0] + s[1] + s[3] + s[6])
2246 2230 if explicitonly:
2247 2231 forget = [f for f in forget if match.exact(f)]
2248 2232
2249 2233 for subpath in sorted(wctx.substate):
2250 2234 sub = wctx.sub(subpath)
2251 2235 try:
2252 2236 submatch = matchmod.narrowmatcher(subpath, match)
2253 2237 subbad, subforgot = sub.forget(submatch, prefix)
2254 2238 bad.extend([subpath + '/' + f for f in subbad])
2255 2239 forgot.extend([subpath + '/' + f for f in subforgot])
2256 2240 except error.LookupError:
2257 2241 ui.status(_("skipping missing subrepository: %s\n")
2258 2242 % join(subpath))
2259 2243
2260 2244 if not explicitonly:
2261 2245 for f in match.files():
2262 2246 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2263 2247 if f not in forgot:
2264 2248 if repo.wvfs.exists(f):
2265 2249 # Don't complain if the exact case match wasn't given.
2266 2250 # But don't do this until after checking 'forgot', so
2267 2251 # that subrepo files aren't normalized, and this op is
2268 2252 # purely from data cached by the status walk above.
2269 2253 if repo.dirstate.normalize(f) in repo.dirstate:
2270 2254 continue
2271 2255 ui.warn(_('not removing %s: '
2272 2256 'file is already untracked\n')
2273 2257 % match.rel(f))
2274 2258 bad.append(f)
2275 2259
2276 2260 for f in forget:
2277 2261 if ui.verbose or not match.exact(f):
2278 2262 ui.status(_('removing %s\n') % match.rel(f))
2279 2263
2280 2264 rejected = wctx.forget(forget, prefix)
2281 2265 bad.extend(f for f in rejected if f in match.files())
2282 2266 forgot.extend(f for f in forget if f not in rejected)
2283 2267 return bad, forgot
2284 2268
2285 2269 def files(ui, ctx, m, fm, fmt, subrepos):
2286 2270 rev = ctx.rev()
2287 2271 ret = 1
2288 2272 ds = ctx.repo().dirstate
2289 2273
2290 2274 for f in ctx.matches(m):
2291 2275 if rev is None and ds[f] == 'r':
2292 2276 continue
2293 2277 fm.startitem()
2294 2278 if ui.verbose:
2295 2279 fc = ctx[f]
2296 2280 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2297 2281 fm.data(abspath=f)
2298 2282 fm.write('path', fmt, m.rel(f))
2299 2283 ret = 0
2300 2284
2301 2285 for subpath in sorted(ctx.substate):
2302 2286 def matchessubrepo(subpath):
2303 2287 return (m.always() or m.exact(subpath)
2304 2288 or any(f.startswith(subpath + '/') for f in m.files()))
2305 2289
2306 2290 if subrepos or matchessubrepo(subpath):
2307 2291 sub = ctx.sub(subpath)
2308 2292 try:
2309 2293 submatch = matchmod.narrowmatcher(subpath, m)
2310 2294 if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
2311 2295 ret = 0
2312 2296 except error.LookupError:
2313 2297 ui.status(_("skipping missing subrepository: %s\n")
2314 2298 % m.abs(subpath))
2315 2299
2316 2300 return ret
2317 2301
2318 2302 def remove(ui, repo, m, prefix, after, force, subrepos):
2319 2303 join = lambda f: os.path.join(prefix, f)
2320 2304 ret = 0
2321 2305 s = repo.status(match=m, clean=True)
2322 2306 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2323 2307
2324 2308 wctx = repo[None]
2325 2309
2326 2310 for subpath in sorted(wctx.substate):
2327 2311 def matchessubrepo(matcher, subpath):
2328 2312 if matcher.exact(subpath):
2329 2313 return True
2330 2314 for f in matcher.files():
2331 2315 if f.startswith(subpath):
2332 2316 return True
2333 2317 return False
2334 2318
2335 2319 if subrepos or matchessubrepo(m, subpath):
2336 2320 sub = wctx.sub(subpath)
2337 2321 try:
2338 2322 submatch = matchmod.narrowmatcher(subpath, m)
2339 2323 if sub.removefiles(submatch, prefix, after, force, subrepos):
2340 2324 ret = 1
2341 2325 except error.LookupError:
2342 2326 ui.status(_("skipping missing subrepository: %s\n")
2343 2327 % join(subpath))
2344 2328
2345 2329 # warn about failure to delete explicit files/dirs
2346 2330 deleteddirs = util.dirs(deleted)
2347 2331 for f in m.files():
2348 2332 def insubrepo():
2349 2333 for subpath in wctx.substate:
2350 2334 if f.startswith(subpath):
2351 2335 return True
2352 2336 return False
2353 2337
2354 2338 isdir = f in deleteddirs or wctx.hasdir(f)
2355 2339 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2356 2340 continue
2357 2341
2358 2342 if repo.wvfs.exists(f):
2359 2343 if repo.wvfs.isdir(f):
2360 2344 ui.warn(_('not removing %s: no tracked files\n')
2361 2345 % m.rel(f))
2362 2346 else:
2363 2347 ui.warn(_('not removing %s: file is untracked\n')
2364 2348 % m.rel(f))
2365 2349 # missing files will generate a warning elsewhere
2366 2350 ret = 1
2367 2351
2368 2352 if force:
2369 2353 list = modified + deleted + clean + added
2370 2354 elif after:
2371 2355 list = deleted
2372 2356 for f in modified + added + clean:
2373 2357 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2374 2358 ret = 1
2375 2359 else:
2376 2360 list = deleted + clean
2377 2361 for f in modified:
2378 2362 ui.warn(_('not removing %s: file is modified (use -f'
2379 2363 ' to force removal)\n') % m.rel(f))
2380 2364 ret = 1
2381 2365 for f in added:
2382 2366 ui.warn(_('not removing %s: file has been marked for add'
2383 2367 ' (use forget to undo)\n') % m.rel(f))
2384 2368 ret = 1
2385 2369
2386 2370 for f in sorted(list):
2387 2371 if ui.verbose or not m.exact(f):
2388 2372 ui.status(_('removing %s\n') % m.rel(f))
2389 2373
2390 2374 wlock = repo.wlock()
2391 2375 try:
2392 2376 if not after:
2393 2377 for f in list:
2394 2378 if f in added:
2395 2379 continue # we never unlink added files on remove
2396 2380 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2397 2381 repo[None].forget(list)
2398 2382 finally:
2399 2383 wlock.release()
2400 2384
2401 2385 return ret
2402 2386
2403 2387 def cat(ui, repo, ctx, matcher, prefix, **opts):
2404 2388 err = 1
2405 2389
2406 2390 def write(path):
2407 2391 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2408 2392 pathname=os.path.join(prefix, path))
2409 2393 data = ctx[path].data()
2410 2394 if opts.get('decode'):
2411 2395 data = repo.wwritedata(path, data)
2412 2396 fp.write(data)
2413 2397 fp.close()
2414 2398
2415 2399 # Automation often uses hg cat on single files, so special case it
2416 2400 # for performance to avoid the cost of parsing the manifest.
2417 2401 if len(matcher.files()) == 1 and not matcher.anypats():
2418 2402 file = matcher.files()[0]
2419 2403 mf = repo.manifest
2420 2404 mfnode = ctx.manifestnode()
2421 2405 if mfnode and mf.find(mfnode, file)[0]:
2422 2406 write(file)
2423 2407 return 0
2424 2408
2425 2409 # Don't warn about "missing" files that are really in subrepos
2426 2410 def badfn(path, msg):
2427 2411 for subpath in ctx.substate:
2428 2412 if path.startswith(subpath):
2429 2413 return
2430 2414 matcher.bad(path, msg)
2431 2415
2432 2416 for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
2433 2417 write(abs)
2434 2418 err = 0
2435 2419
2436 2420 for subpath in sorted(ctx.substate):
2437 2421 sub = ctx.sub(subpath)
2438 2422 try:
2439 2423 submatch = matchmod.narrowmatcher(subpath, matcher)
2440 2424
2441 2425 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2442 2426 **opts):
2443 2427 err = 0
2444 2428 except error.RepoLookupError:
2445 2429 ui.status(_("skipping missing subrepository: %s\n")
2446 2430 % os.path.join(prefix, subpath))
2447 2431
2448 2432 return err
2449 2433
2450 2434 def commit(ui, repo, commitfunc, pats, opts):
2451 2435 '''commit the specified files or all outstanding changes'''
2452 2436 date = opts.get('date')
2453 2437 if date:
2454 2438 opts['date'] = util.parsedate(date)
2455 2439 message = logmessage(ui, opts)
2456 2440 matcher = scmutil.match(repo[None], pats, opts)
2457 2441
2458 2442 # extract addremove carefully -- this function can be called from a command
2459 2443 # that doesn't support addremove
2460 2444 if opts.get('addremove'):
2461 2445 if scmutil.addremove(repo, matcher, "", opts) != 0:
2462 2446 raise util.Abort(
2463 2447 _("failed to mark all new/missing files as added/removed"))
2464 2448
2465 2449 return commitfunc(ui, repo, message, matcher, opts)
2466 2450
2467 2451 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2468 2452 # avoid cycle context -> subrepo -> cmdutil
2469 2453 import context
2470 2454
2471 2455 # amend will reuse the existing user if not specified, but the obsolete
2472 2456 # marker creation requires that the current user's name is specified.
2473 2457 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2474 2458 ui.username() # raise exception if username not set
2475 2459
2476 2460 ui.note(_('amending changeset %s\n') % old)
2477 2461 base = old.p1()
2478 2462 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2479 2463
2480 2464 wlock = dsguard = lock = newid = None
2481 2465 try:
2482 2466 wlock = repo.wlock()
2483 2467 dsguard = dirstateguard(repo, 'amend')
2484 2468 lock = repo.lock()
2485 2469 tr = repo.transaction('amend')
2486 2470 try:
2487 2471 # See if we got a message from -m or -l, if not, open the editor
2488 2472 # with the message of the changeset to amend
2489 2473 message = logmessage(ui, opts)
2490 2474 # ensure logfile does not conflict with later enforcement of the
2491 2475 # message. potential logfile content has been processed by
2492 2476 # `logmessage` anyway.
2493 2477 opts.pop('logfile')
2494 2478 # First, do a regular commit to record all changes in the working
2495 2479 # directory (if there are any)
2496 2480 ui.callhooks = False
2497 2481 activebookmark = repo._activebookmark
2498 2482 try:
2499 2483 repo._activebookmark = None
2500 2484 opts['message'] = 'temporary amend commit for %s' % old
2501 2485 node = commit(ui, repo, commitfunc, pats, opts)
2502 2486 finally:
2503 2487 repo._activebookmark = activebookmark
2504 2488 ui.callhooks = True
2505 2489 ctx = repo[node]
2506 2490
2507 2491 # Participating changesets:
2508 2492 #
2509 2493 # node/ctx o - new (intermediate) commit that contains changes
2510 2494 # | from working dir to go into amending commit
2511 2495 # | (or a workingctx if there were no changes)
2512 2496 # |
2513 2497 # old o - changeset to amend
2514 2498 # |
2515 2499 # base o - parent of amending changeset
2516 2500
2517 2501 # Update extra dict from amended commit (e.g. to preserve graft
2518 2502 # source)
2519 2503 extra.update(old.extra())
2520 2504
2521 2505 # Also update it from the intermediate commit or from the wctx
2522 2506 extra.update(ctx.extra())
2523 2507
2524 2508 if len(old.parents()) > 1:
2525 2509 # ctx.files() isn't reliable for merges, so fall back to the
2526 2510 # slower repo.status() method
2527 2511 files = set([fn for st in repo.status(base, old)[:3]
2528 2512 for fn in st])
2529 2513 else:
2530 2514 files = set(old.files())
2531 2515
2532 2516 # Second, we use either the commit we just did, or, if there were no
2533 2517 # changes, the parent of the working directory as the version of the
2534 2518 # files in the final amend commit
2535 2519 if node:
2536 2520 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2537 2521
2538 2522 user = ctx.user()
2539 2523 date = ctx.date()
2540 2524 # Recompute copies (avoid recording a -> b -> a)
2541 2525 copied = copies.pathcopies(base, ctx)
2542 2526 if old.p2().node() != nullid:
2543 2527 copied.update(copies.pathcopies(old.p2(), ctx))
2544 2528
2545 2529 # Prune files which were reverted by the updates: if old
2546 2530 # introduced file X and our intermediate commit, node,
2547 2531 # renamed that file, then those two files are the same and
2548 2532 # we can discard X from our list of files. Likewise if X
2549 2533 # was deleted, it's no longer relevant
2550 2534 files.update(ctx.files())
2551 2535
2552 2536 def samefile(f):
2553 2537 if f in ctx.manifest():
2554 2538 a = ctx.filectx(f)
2555 2539 if f in base.manifest():
2556 2540 b = base.filectx(f)
2557 2541 return (not a.cmp(b)
2558 2542 and a.flags() == b.flags())
2559 2543 else:
2560 2544 return False
2561 2545 else:
2562 2546 return f not in base.manifest()
2563 2547 files = [f for f in files if not samefile(f)]
2564 2548
2565 2549 def filectxfn(repo, ctx_, path):
2566 2550 try:
2567 2551 fctx = ctx[path]
2568 2552 flags = fctx.flags()
2569 2553 mctx = context.memfilectx(repo,
2570 2554 fctx.path(), fctx.data(),
2571 2555 islink='l' in flags,
2572 2556 isexec='x' in flags,
2573 2557 copied=copied.get(path))
2574 2558 return mctx
2575 2559 except KeyError:
2576 2560 return None
2577 2561 else:
2578 2562 ui.note(_('copying changeset %s to %s\n') % (old, base))
2579 2563
2580 2564 # Use version of files as in the old cset
2581 2565 def filectxfn(repo, ctx_, path):
2582 2566 try:
2583 2567 return old.filectx(path)
2584 2568 except KeyError:
2585 2569 return None
2586 2570
2587 2571 user = opts.get('user') or old.user()
2588 2572 date = opts.get('date') or old.date()
2589 2573 editform = mergeeditform(old, 'commit.amend')
2590 2574 editor = getcommiteditor(editform=editform, **opts)
2591 2575 if not message:
2592 2576 editor = getcommiteditor(edit=True, editform=editform)
2593 2577 message = old.description()
2594 2578
2595 2579 pureextra = extra.copy()
2596 2580 extra['amend_source'] = old.hex()
2597 2581
2598 2582 new = context.memctx(repo,
2599 2583 parents=[base.node(), old.p2().node()],
2600 2584 text=message,
2601 2585 files=files,
2602 2586 filectxfn=filectxfn,
2603 2587 user=user,
2604 2588 date=date,
2605 2589 extra=extra,
2606 2590 editor=editor)
2607 2591
2608 2592 newdesc = changelog.stripdesc(new.description())
2609 2593 if ((not node)
2610 2594 and newdesc == old.description()
2611 2595 and user == old.user()
2612 2596 and date == old.date()
2613 2597 and pureextra == old.extra()):
2614 2598 # nothing changed. continuing here would create a new node
2615 2599 # anyway because of the amend_source noise.
2616 2600 #
2617 2601 # This is not what we expect from amend.
2618 2602 return old.node()
2619 2603
2620 2604 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2621 2605 try:
2622 2606 if opts.get('secret'):
2623 2607 commitphase = 'secret'
2624 2608 else:
2625 2609 commitphase = old.phase()
2626 2610 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2627 2611 newid = repo.commitctx(new)
2628 2612 finally:
2629 2613 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2630 2614 if newid != old.node():
2631 2615 # Reroute the working copy parent to the new changeset
2632 2616 repo.setparents(newid, nullid)
2633 2617
2634 2618 # Move bookmarks from old parent to amend commit
2635 2619 bms = repo.nodebookmarks(old.node())
2636 2620 if bms:
2637 2621 marks = repo._bookmarks
2638 2622 for bm in bms:
2639 2623 ui.debug('moving bookmarks %r from %s to %s\n' %
2640 2624 (marks, old.hex(), hex(newid)))
2641 2625 marks[bm] = newid
2642 2626 marks.recordchange(tr)
2643 2627 # commit the whole amend process
2644 2628 if createmarkers:
2645 2629 # mark the new changeset as successor of the rewritten one
2646 2630 new = repo[newid]
2647 2631 obs = [(old, (new,))]
2648 2632 if node:
2649 2633 obs.append((ctx, ()))
2650 2634
2651 2635 obsolete.createmarkers(repo, obs)
2652 2636 tr.close()
2653 2637 finally:
2654 2638 tr.release()
2655 2639 dsguard.close()
2656 2640 if not createmarkers and newid != old.node():
2657 2641 # Strip the intermediate commit (if there was one) and the amended
2658 2642 # commit
2659 2643 if node:
2660 2644 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2661 2645 ui.note(_('stripping amended changeset %s\n') % old)
2662 2646 repair.strip(ui, repo, old.node(), topic='amend-backup')
2663 2647 finally:
2664 2648 lockmod.release(lock, dsguard, wlock)
2665 2649 return newid
2666 2650
2667 2651 def commiteditor(repo, ctx, subs, editform=''):
2668 2652 if ctx.description():
2669 2653 return ctx.description()
2670 2654 return commitforceeditor(repo, ctx, subs, editform=editform)
2671 2655
2672 2656 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2673 2657 editform=''):
2674 2658 if not extramsg:
2675 2659 extramsg = _("Leave message empty to abort commit.")
2676 2660
2677 2661 forms = [e for e in editform.split('.') if e]
2678 2662 forms.insert(0, 'changeset')
2679 2663 while forms:
2680 2664 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2681 2665 if tmpl:
2682 2666 committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
2683 2667 break
2684 2668 forms.pop()
2685 2669 else:
2686 2670 committext = buildcommittext(repo, ctx, subs, extramsg)
2687 2671
2688 2672 # run editor in the repository root
2689 2673 olddir = os.getcwd()
2690 2674 os.chdir(repo.root)
2691 2675 text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
2692 2676 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2693 2677 os.chdir(olddir)
2694 2678
2695 2679 if finishdesc:
2696 2680 text = finishdesc(text)
2697 2681 if not text.strip():
2698 2682 raise util.Abort(_("empty commit message"))
2699 2683
2700 2684 return text
2701 2685
2702 2686 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2703 2687 ui = repo.ui
2704 2688 tmpl, mapfile = gettemplate(ui, tmpl, None)
2705 2689
2706 2690 try:
2707 2691 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2708 2692 except SyntaxError as inst:
2709 2693 raise util.Abort(inst.args[0])
2710 2694
2711 2695 for k, v in repo.ui.configitems('committemplate'):
2712 2696 if k != 'changeset':
2713 2697 t.t.cache[k] = v
2714 2698
2715 2699 if not extramsg:
2716 2700 extramsg = '' # ensure that extramsg is string
2717 2701
2718 2702 ui.pushbuffer()
2719 2703 t.show(ctx, extramsg=extramsg)
2720 2704 return ui.popbuffer()
2721 2705
2722 2706 def hgprefix(msg):
2723 2707 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2724 2708
2725 2709 def buildcommittext(repo, ctx, subs, extramsg):
2726 2710 edittext = []
2727 2711 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2728 2712 if ctx.description():
2729 2713 edittext.append(ctx.description())
2730 2714 edittext.append("")
2731 2715 edittext.append("") # Empty line between message and comments.
2732 2716 edittext.append(hgprefix(_("Enter commit message."
2733 2717 " Lines beginning with 'HG:' are removed.")))
2734 2718 edittext.append(hgprefix(extramsg))
2735 2719 edittext.append("HG: --")
2736 2720 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2737 2721 if ctx.p2():
2738 2722 edittext.append(hgprefix(_("branch merge")))
2739 2723 if ctx.branch():
2740 2724 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2741 2725 if bookmarks.isactivewdirparent(repo):
2742 2726 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2743 2727 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2744 2728 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2745 2729 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2746 2730 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2747 2731 if not added and not modified and not removed:
2748 2732 edittext.append(hgprefix(_("no files changed")))
2749 2733 edittext.append("")
2750 2734
2751 2735 return "\n".join(edittext)
2752 2736
2753 2737 def commitstatus(repo, node, branch, bheads=None, opts=None):
2754 2738 if opts is None:
2755 2739 opts = {}
2756 2740 ctx = repo[node]
2757 2741 parents = ctx.parents()
2758 2742
2759 2743 if (not opts.get('amend') and bheads and node not in bheads and not
2760 2744 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2761 2745 repo.ui.status(_('created new head\n'))
2762 2746 # The message is not printed for initial roots. For the other
2763 2747 # changesets, it is printed in the following situations:
2764 2748 #
2765 2749 # Par column: for the 2 parents with ...
2766 2750 # N: null or no parent
2767 2751 # B: parent is on another named branch
2768 2752 # C: parent is a regular non head changeset
2769 2753 # H: parent was a branch head of the current branch
2770 2754 # Msg column: whether we print "created new head" message
2771 2755 # In the following, it is assumed that there already exists some
2772 2756 # initial branch heads of the current branch, otherwise nothing is
2773 2757 # printed anyway.
2774 2758 #
2775 2759 # Par Msg Comment
2776 2760 # N N y additional topo root
2777 2761 #
2778 2762 # B N y additional branch root
2779 2763 # C N y additional topo head
2780 2764 # H N n usual case
2781 2765 #
2782 2766 # B B y weird additional branch root
2783 2767 # C B y branch merge
2784 2768 # H B n merge with named branch
2785 2769 #
2786 2770 # C C y additional head from merge
2787 2771 # C H n merge with a head
2788 2772 #
2789 2773 # H H n head merge: head count decreases
2790 2774
2791 2775 if not opts.get('close_branch'):
2792 2776 for r in parents:
2793 2777 if r.closesbranch() and r.branch() == branch:
2794 2778 repo.ui.status(_('reopening closed branch head %d\n') % r)
2795 2779
2796 2780 if repo.ui.debugflag:
2797 2781 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2798 2782 elif repo.ui.verbose:
2799 2783 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2800 2784
2801 2785 def revert(ui, repo, ctx, parents, *pats, **opts):
2802 2786 parent, p2 = parents
2803 2787 node = ctx.node()
2804 2788
2805 2789 mf = ctx.manifest()
2806 2790 if node == p2:
2807 2791 parent = p2
2808 2792 if node == parent:
2809 2793 pmf = mf
2810 2794 else:
2811 2795 pmf = None
2812 2796
2813 2797 # need all matching names in dirstate and manifest of target rev,
2814 2798 # so have to walk both. do not print errors if files exist in one
2815 2799 # but not the other. in both cases, filesets should be evaluated against
2816 2800 # workingctx to get consistent result (issue4497). this means 'set:**'
2817 2801 # cannot be used to select missing files from target rev.
2818 2802
2819 2803 # `names` is a mapping for all elements in working copy and target revision
2820 2804 # The mapping is in the form:
2821 2805 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2822 2806 names = {}
2823 2807
2824 2808 wlock = repo.wlock()
2825 2809 try:
2826 2810 ## filling of the `names` mapping
2827 2811 # walk dirstate to fill `names`
2828 2812
2829 2813 interactive = opts.get('interactive', False)
2830 2814 wctx = repo[None]
2831 2815 m = scmutil.match(wctx, pats, opts)
2832 2816
2833 2817 # we'll need this later
2834 2818 targetsubs = sorted(s for s in wctx.substate if m(s))
2835 2819
2836 2820 if not m.always():
2837 2821 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2838 2822 names[abs] = m.rel(abs), m.exact(abs)
2839 2823
2840 2824 # walk target manifest to fill `names`
2841 2825
2842 2826 def badfn(path, msg):
2843 2827 if path in names:
2844 2828 return
2845 2829 if path in ctx.substate:
2846 2830 return
2847 2831 path_ = path + '/'
2848 2832 for f in names:
2849 2833 if f.startswith(path_):
2850 2834 return
2851 2835 ui.warn("%s: %s\n" % (m.rel(path), msg))
2852 2836
2853 2837 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2854 2838 if abs not in names:
2855 2839 names[abs] = m.rel(abs), m.exact(abs)
2856 2840
2857 2841 # Find status of all file in `names`.
2858 2842 m = scmutil.matchfiles(repo, names)
2859 2843
2860 2844 changes = repo.status(node1=node, match=m,
2861 2845 unknown=True, ignored=True, clean=True)
2862 2846 else:
2863 2847 changes = repo.status(node1=node, match=m)
2864 2848 for kind in changes:
2865 2849 for abs in kind:
2866 2850 names[abs] = m.rel(abs), m.exact(abs)
2867 2851
2868 2852 m = scmutil.matchfiles(repo, names)
2869 2853
2870 2854 modified = set(changes.modified)
2871 2855 added = set(changes.added)
2872 2856 removed = set(changes.removed)
2873 2857 _deleted = set(changes.deleted)
2874 2858 unknown = set(changes.unknown)
2875 2859 unknown.update(changes.ignored)
2876 2860 clean = set(changes.clean)
2877 2861 modadded = set()
2878 2862
2879 2863 # split between files known in target manifest and the others
2880 2864 smf = set(mf)
2881 2865
2882 2866 # determine the exact nature of the deleted files
2883 2867 deladded = _deleted - smf
2884 2868 deleted = _deleted - deladded
2885 2869
2886 2870 # We need to account for the state of the file in the dirstate,
2887 2871 # even when we revert against something other than the parent. This will
2888 2872 # slightly alter the behavior of revert (doing a backup or not, delete
2889 2873 # or just forget etc).
2890 2874 if parent == node:
2891 2875 dsmodified = modified
2892 2876 dsadded = added
2893 2877 dsremoved = removed
2894 2878 # store all local modifications, useful later for rename detection
2895 2879 localchanges = dsmodified | dsadded
2896 2880 modified, added, removed = set(), set(), set()
2897 2881 else:
2898 2882 changes = repo.status(node1=parent, match=m)
2899 2883 dsmodified = set(changes.modified)
2900 2884 dsadded = set(changes.added)
2901 2885 dsremoved = set(changes.removed)
2902 2886 # store all local modifications, useful later for rename detection
2903 2887 localchanges = dsmodified | dsadded
2904 2888
2905 2889 # only take into account removes between wc and target
2906 2890 clean |= dsremoved - removed
2907 2891 dsremoved &= removed
2908 2892 # distinguish between dirstate removes and the others
2909 2893 removed -= dsremoved
2910 2894
2911 2895 modadded = added & dsmodified
2912 2896 added -= modadded
2913 2897
2914 2898 # tell the newly modified files apart.
2915 2899 dsmodified &= modified
2916 2900 dsmodified |= modified & dsadded # dirstate-added files may need backup
2917 2901 modified -= dsmodified
2918 2902
2919 2903 # We need to wait for some post-processing to update this set
2920 2904 # before making the distinction. The dirstate will be used for
2921 2905 # that purpose.
2922 2906 dsadded = added
2923 2907
2924 2908 # in case of merge, files that are actually added can be reported as
2925 2909 # modified; we need to post-process the result
2926 2910 if p2 != nullid:
2927 2911 if pmf is None:
2928 2912 # only need parent manifest in the merge case,
2929 2913 # so do not read by default
2930 2914 pmf = repo[parent].manifest()
2931 2915 mergeadd = dsmodified - set(pmf)
2932 2916 dsadded |= mergeadd
2933 2917 dsmodified -= mergeadd
2934 2918
2935 2919 # if f is a rename, update `names` to also revert the source
2936 2920 cwd = repo.getcwd()
2937 2921 for f in localchanges:
2938 2922 src = repo.dirstate.copied(f)
2939 2923 # XXX should we check for rename down to target node?
2940 2924 if src and src not in names and repo.dirstate[src] == 'r':
2941 2925 dsremoved.add(src)
2942 2926 names[src] = (repo.pathto(src, cwd), True)
2943 2927
2944 2928 # distinguish between files to forget and the others
2945 2929 added = set()
2946 2930 for abs in dsadded:
2947 2931 if repo.dirstate[abs] != 'a':
2948 2932 added.add(abs)
2949 2933 dsadded -= added
2950 2934
2951 2935 for abs in deladded:
2952 2936 if repo.dirstate[abs] == 'a':
2953 2937 dsadded.add(abs)
2954 2938 deladded -= dsadded
2955 2939
2956 2940 # For files marked as removed, we check if an unknown file is present at
2957 2941 # the same path. If such a file exists it may need to be backed up.
2958 2942 # Making the distinction at this stage helps keep the backup
2959 2943 # logic simpler.
2960 2944 removunk = set()
2961 2945 for abs in removed:
2962 2946 target = repo.wjoin(abs)
2963 2947 if os.path.lexists(target):
2964 2948 removunk.add(abs)
2965 2949 removed -= removunk
2966 2950
2967 2951 dsremovunk = set()
2968 2952 for abs in dsremoved:
2969 2953 target = repo.wjoin(abs)
2970 2954 if os.path.lexists(target):
2971 2955 dsremovunk.add(abs)
2972 2956 dsremoved -= dsremovunk
2973 2957
2974 2958 # action to be actually performed by revert
2975 2959 # (<list of file>, <message>) tuple
2976 2960 actions = {'revert': ([], _('reverting %s\n')),
2977 2961 'add': ([], _('adding %s\n')),
2978 2962 'remove': ([], _('removing %s\n')),
2979 2963 'drop': ([], _('removing %s\n')),
2980 2964 'forget': ([], _('forgetting %s\n')),
2981 2965 'undelete': ([], _('undeleting %s\n')),
2982 2966 'noop': (None, _('no changes needed to %s\n')),
2983 2967 'unknown': (None, _('file not managed: %s\n')),
2984 2968 }
2985 2969
2986 2970 # "constant" that convey the backup strategy.
2987 2971 # All set to `discard` if `no-backup` is set do avoid checking
2988 2972 # no_backup lower in the code.
2989 2973 # These values are ordered for comparison purposes
2990 2974 backup = 2 # unconditionally do backup
2991 2975 check = 1 # check if the existing file differs from target
2992 2976 discard = 0 # never do backup
2993 2977 if opts.get('no_backup'):
2994 2978 backup = check = discard
2995 2979
2996 2980 backupanddel = actions['remove']
2997 2981 if not opts.get('no_backup'):
2998 2982 backupanddel = actions['drop']
2999 2983
3000 2984 disptable = (
3001 2985 # dispatch table:
3002 2986 # file state
3003 2987 # action
3004 2988 # make backup
3005 2989
3006 2990 ## Sets whose results will change files on disk
3007 2991 # Modified compared to target, no local change
3008 2992 (modified, actions['revert'], discard),
3009 2993 # Modified compared to target, but local file is deleted
3010 2994 (deleted, actions['revert'], discard),
3011 2995 # Modified compared to target, local change
3012 2996 (dsmodified, actions['revert'], backup),
3013 2997 # Added since target
3014 2998 (added, actions['remove'], discard),
3015 2999 # Added in working directory
3016 3000 (dsadded, actions['forget'], discard),
3017 3001 # Added since target, have local modification
3018 3002 (modadded, backupanddel, backup),
3019 3003 # Added since target but file is missing in working directory
3020 3004 (deladded, actions['drop'], discard),
3021 3005 # Removed since target, before working copy parent
3022 3006 (removed, actions['add'], discard),
3023 3007 # Same as `removed` but an unknown file exists at the same path
3024 3008 (removunk, actions['add'], check),
3025 3009 # Removed since target, marked as such in working copy parent
3026 3010 (dsremoved, actions['undelete'], discard),
3027 3011 # Same as `dsremoved` but an unknown file exists at the same path
3028 3012 (dsremovunk, actions['undelete'], check),
3029 3013 ## the following sets do not result in any file changes
3030 3014 # File with no modification
3031 3015 (clean, actions['noop'], discard),
3032 3016 # Existing file, not tracked anywhere
3033 3017 (unknown, actions['unknown'], discard),
3034 3018 )
3035 3019
3036 3020 for abs, (rel, exact) in sorted(names.items()):
3037 3021 # target file to be touched on disk (relative to cwd)
3038 3022 target = repo.wjoin(abs)
3039 3023 # search the entry in the dispatch table.
3040 3024 # if the file is in any of these sets, it was touched in the working
3041 3025 # directory parent and we are sure it needs to be reverted.
3042 3026 for table, (xlist, msg), dobackup in disptable:
3043 3027 if abs not in table:
3044 3028 continue
3045 3029 if xlist is not None:
3046 3030 xlist.append(abs)
3047 3031 if dobackup and (backup <= dobackup
3048 3032 or wctx[abs].cmp(ctx[abs])):
3049 3033 bakname = "%s.orig" % rel
3050 3034 ui.note(_('saving current version of %s as %s\n') %
3051 3035 (rel, bakname))
3052 3036 if not opts.get('dry_run'):
3053 3037 if interactive:
3054 3038 util.copyfile(target, bakname)
3055 3039 else:
3056 3040 util.rename(target, bakname)
3057 3041 if ui.verbose or not exact:
3058 3042 if not isinstance(msg, basestring):
3059 3043 msg = msg(abs)
3060 3044 ui.status(msg % rel)
3061 3045 elif exact:
3062 3046 ui.warn(msg % rel)
3063 3047 break
3064 3048
3065 3049 if not opts.get('dry_run'):
3066 3050 needdata = ('revert', 'add', 'undelete')
3067 3051 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3068 3052 _performrevert(repo, parents, ctx, actions, interactive)
3069 3053
3070 3054 if targetsubs:
3071 3055 # Revert the subrepos on the revert list
3072 3056 for sub in targetsubs:
3073 3057 try:
3074 3058 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3075 3059 except KeyError:
3076 3060 raise util.Abort("subrepository '%s' does not exist in %s!"
3077 3061 % (sub, short(ctx.node())))
3078 3062 finally:
3079 3063 wlock.release()
3080 3064
3081 3065 def _revertprefetch(repo, ctx, *files):
3082 3066 """Let extension changing the storage layer prefetch content"""
3083 3067 pass
3084 3068
3085 3069 def _performrevert(repo, parents, ctx, actions, interactive=False):
3086 3070 """function that actually perform all the actions computed for revert
3087 3071
3088 3072 This is an independent function to let extensions plug in and react to
3089 3073 the imminent revert.
3090 3074
3091 3075 Make sure you have the working directory locked when calling this function.
3092 3076 """
3093 3077 parent, p2 = parents
3094 3078 node = ctx.node()
3095 3079 def checkout(f):
3096 3080 fc = ctx[f]
3097 3081 repo.wwrite(f, fc.data(), fc.flags())
3098 3082
3099 3083 audit_path = pathutil.pathauditor(repo.root)
3100 3084 for f in actions['forget'][0]:
3101 3085 repo.dirstate.drop(f)
3102 3086 for f in actions['remove'][0]:
3103 3087 audit_path(f)
3104 3088 try:
3105 3089 util.unlinkpath(repo.wjoin(f))
3106 3090 except OSError:
3107 3091 pass
3108 3092 repo.dirstate.remove(f)
3109 3093 for f in actions['drop'][0]:
3110 3094 audit_path(f)
3111 3095 repo.dirstate.remove(f)
3112 3096
3113 3097 normal = None
3114 3098 if node == parent:
3115 3099 # We're reverting to our parent. If possible, we'd like status
3116 3100 # to report the file as clean. We have to use normallookup for
3117 3101 # merges to avoid losing information about merged/dirty files.
3118 3102 if p2 != nullid:
3119 3103 normal = repo.dirstate.normallookup
3120 3104 else:
3121 3105 normal = repo.dirstate.normal
3122 3106
3123 3107 newlyaddedandmodifiedfiles = set()
3124 3108 if interactive:
3125 3109 # Prompt the user for changes to revert
3126 3110 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3127 3111 m = scmutil.match(ctx, torevert, {})
3128 3112 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3129 3113 diffopts.nodates = True
3130 3114 diffopts.git = True
3131 3115 reversehunks = repo.ui.configbool('experimental',
3132 3116 'revertalternateinteractivemode',
3133 3117 True)
3134 3118 if reversehunks:
3135 3119 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3136 3120 else:
3137 3121 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3138 3122 originalchunks = patch.parsepatch(diff)
3139 3123
3140 3124 try:
3141 3125
3142 3126 chunks = recordfilter(repo.ui, originalchunks)
3143 3127 if reversehunks:
3144 3128 chunks = patch.reversehunks(chunks)
3145 3129
3146 3130 except patch.PatchError as err:
3147 3131 raise util.Abort(_('error parsing patch: %s') % err)
3148 3132
3149 3133 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3150 3134 # Apply changes
3151 3135 fp = cStringIO.StringIO()
3152 3136 for c in chunks:
3153 3137 c.write(fp)
3154 3138 dopatch = fp.tell()
3155 3139 fp.seek(0)
3156 3140 if dopatch:
3157 3141 try:
3158 3142 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3159 3143 except patch.PatchError as err:
3160 3144 raise util.Abort(str(err))
3161 3145 del fp
3162 3146 else:
3163 3147 for f in actions['revert'][0]:
3164 3148 checkout(f)
3165 3149 if normal:
3166 3150 normal(f)
3167 3151
3168 3152 for f in actions['add'][0]:
3169 3153 # Don't check out modified files; they were already created by the diff
3170 3154 if f not in newlyaddedandmodifiedfiles:
3171 3155 checkout(f)
3172 3156 repo.dirstate.add(f)
3173 3157
3174 3158 normal = repo.dirstate.normallookup
3175 3159 if node == parent and p2 == nullid:
3176 3160 normal = repo.dirstate.normal
3177 3161 for f in actions['undelete'][0]:
3178 3162 checkout(f)
3179 3163 normal(f)
3180 3164
3181 3165 copied = copies.pathcopies(repo[parent], ctx)
3182 3166
3183 3167 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3184 3168 if f in copied:
3185 3169 repo.dirstate.copy(copied[f], f)
3186 3170
3187 3171 def command(table):
3188 3172 """Returns a function object to be used as a decorator for making commands.
3189 3173
3190 3174 This function receives a command table as its argument. The table should
3191 3175 be a dict.
3192 3176
3193 3177 The returned function can be used as a decorator for adding commands
3194 3178 to that command table. This function accepts multiple arguments to define
3195 3179 a command.
3196 3180
3197 3181 The first argument is the command name.
3198 3182
3199 3183 The options argument is an iterable of tuples defining command arguments.
3200 3184 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3201 3185
3202 3186 The synopsis argument defines a short, one line summary of how to use the
3203 3187 command. This shows up in the help output.
3204 3188
3205 3189 The norepo argument defines whether the command does not require a
3206 3190 local repository. Most commands operate against a repository, thus the
3207 3191 default is False.
3208 3192
3209 3193 The optionalrepo argument defines whether the command optionally requires
3210 3194 a local repository.
3211 3195
3212 3196 The inferrepo argument defines whether to try to find a repository from the
3213 3197 command line arguments. If True, arguments will be examined for potential
3214 3198 repository locations. See ``findrepo()``. If a repository is found, it
3215 3199 will be used.
3216 3200 """
3217 3201 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3218 3202 inferrepo=False):
3219 3203 def decorator(func):
3220 3204 if synopsis:
3221 3205 table[name] = func, list(options), synopsis
3222 3206 else:
3223 3207 table[name] = func, list(options)
3224 3208
3225 3209 if norepo:
3226 3210 # Avoid import cycle.
3227 3211 import commands
3228 3212 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3229 3213
3230 3214 if optionalrepo:
3231 3215 import commands
3232 3216 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3233 3217
3234 3218 if inferrepo:
3235 3219 import commands
3236 3220 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3237 3221
3238 3222 return func
3239 3223 return decorator
3240 3224
3241 3225 return cmd
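
For extension authors, the decorator factory above is normally used once per extension module. The following is a hedged sketch only: the command name 'hello' and the 'greeting' option are made up, and it assumes the version of cmdutil.command() shown in this diff (with the norepo/optionalrepo/inferrepo keyword arguments).

from mercurial import cmdutil
from mercurial.i18n import _

cmdtable = {}
command = cmdutil.command(cmdtable)

@command('hello',
         [('g', 'greeting', 'Hello', _('greeting to print'))],
         _('hg hello [-g TEXT]'),
         norepo=True)
def hello(ui, *args, **opts):
    """print a greeting (purely illustrative)"""
    # norepo=True means dispatch calls this function without a repo argument
    ui.write('%s, world\n' % opts['greeting'])
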
3242 3226
3243 3227 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3244 3228 # commands.outgoing. "missing" is "missing" of the result of
3245 3229 # "findcommonoutgoing()"
3246 3230 outgoinghooks = util.hooks()
3247 3231
3248 3232 # a list of (ui, repo) functions called by commands.summary
3249 3233 summaryhooks = util.hooks()
3250 3234
3251 3235 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3252 3236 #
3253 3237 # functions should return tuple of booleans below, if 'changes' is None:
3254 3238 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3255 3239 #
3256 3240 # otherwise, 'changes' is a tuple of tuples below:
3257 3241 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3258 3242 # - (desturl, destbranch, destpeer, outgoing)
3259 3243 summaryremotehooks = util.hooks()
3260 3244
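As a concrete illustration of the contract described in the comment above, here is a hedged sketch of a hook an extension might register. The extension name, the '--remote' check, and the message are made up; registration via add(source, hook) assumes the util.hooks interface used for these hook points.

from mercurial import cmdutil
from mercurial.i18n import _

def _summaryremote(ui, repo, opts, changes):
    if changes is None:
        # first pass: tell 'hg summary' whether incoming/outgoing data is needed
        needed = bool(opts.get('remote'))
        return (needed, needed)
    # second pass: 'changes' carries the precomputed source/dest tuples
    (srcurl, srcbranch, srcpeer, incoming) = changes[0]
    (dsturl, dstbranch, dstpeer, outgoing) = changes[1]
    if incoming:
        ui.status(_('remote: (example) incoming changes detected\n'))

def extsetup(ui):
    cmdutil.summaryremotehooks.add('myext', _summaryremote)
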
3261 3245 # A list of state files kept by multistep operations like graft.
3262 3246 # Since graft cannot be aborted, it is considered 'clearable' by update.
3263 3247 # note: bisect is intentionally excluded
3264 3248 # (state file, clearable, allowcommit, error, hint)
3265 3249 unfinishedstates = [
3266 3250 ('graftstate', True, False, _('graft in progress'),
3267 3251 _("use 'hg graft --continue' or 'hg update' to abort")),
3268 3252 ('updatestate', True, False, _('last update was interrupted'),
3269 3253 _("use 'hg update' to get a consistent checkout"))
3270 3254 ]
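
Extensions that implement their own multistep commands typically append an entry to this list at setup time so that checkunfinished() and clearunfinished() below know about their state file. A hedged sketch with made-up names:

from mercurial import cmdutil
from mercurial.i18n import _

def extsetup(ui):
    # (state file, clearable, allowcommit, error, hint)
    cmdutil.unfinishedstates.append(
        ('mycommandstate', True, False, _('mycommand in progress'),
         _("use 'hg mycommand --continue' or 'hg update' to abort")))
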
3271 3255
3272 3256 def checkunfinished(repo, commit=False):
3273 3257 '''Look for an unfinished multistep operation, like graft, and abort
3274 3258 if found. It's probably good to check this right before
3275 3259 bailifchanged().
3276 3260 '''
3277 3261 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3278 3262 if commit and allowcommit:
3279 3263 continue
3280 3264 if repo.vfs.exists(f):
3281 3265 raise util.Abort(msg, hint=hint)
3282 3266
3283 3267 def clearunfinished(repo):
3284 3268 '''Check for unfinished operations (as above), and clear the ones
3285 3269 that are clearable.
3286 3270 '''
3287 3271 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3288 3272 if not clearable and repo.vfs.exists(f):
3289 3273 raise util.Abort(msg, hint=hint)
3290 3274 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3291 3275 if clearable and repo.vfs.exists(f):
3292 3276 util.unlink(repo.join(f))
3293 3277
3294 3278 class dirstateguard(object):
3295 3279 '''Restore dirstate at unexpected failure.
3296 3280
3297 3281 At construction time, this class does:
3298 3282
3299 3283 - write current ``repo.dirstate`` out, and
3300 3284 - save ``.hg/dirstate`` into the backup file
3301 3285
3302 3286 ``.hg/dirstate`` is restored from the backup file if ``release()``
3303 3287 is invoked before ``close()``.
3304 3288
3305 3289 If ``close()`` is invoked before ``release()``, only the backup file is removed.
3306 3290 '''
3307 3291
3308 3292 def __init__(self, repo, name):
3309 3293 repo.dirstate.write()
3310 3294 self._repo = repo
3311 3295 self._filename = 'dirstate.backup.%s.%d' % (name, id(self))
3312 3296 repo.vfs.write(self._filename, repo.vfs.tryread('dirstate'))
3313 3297 self._active = True
3314 3298 self._closed = False
3315 3299
3316 3300 def __del__(self):
3317 3301 if self._active: # still active
3318 3302 # this may occur, even if this class is used correctly:
3319 3303 # for example, releasing other resources like a transaction
3320 3304 # may raise an exception before ``dirstateguard.release`` in
3321 3305 # ``release(tr, ....)``.
3322 3306 self._abort()
3323 3307
3324 3308 def close(self):
3325 3309 if not self._active: # already inactivated
3326 3310 msg = (_("can't close already inactivated backup: %s")
3327 3311 % self._filename)
3328 3312 raise util.Abort(msg)
3329 3313
3330 3314 self._repo.vfs.unlink(self._filename)
3331 3315 self._active = False
3332 3316 self._closed = True
3333 3317
3334 3318 def _abort(self):
3335 3319 # this "invalidate()" prevents "wlock.release()" from writing
3336 3320 # dirstate changes out after restoring the original status
3337 3321 self._repo.dirstate.invalidate()
3338 3322
3339 3323 self._repo.vfs.rename(self._filename, 'dirstate')
3340 3324 self._active = False
3341 3325
3342 3326 def release(self):
3343 3327 if not self._closed:
3344 3328 if not self._active: # already inactivated
3345 3329 msg = (_("can't release already inactivated backup: %s")
3346 3330 % self._filename)
3347 3331 raise util.Abort(msg)
3348 3332 self._abort()
@@ -1,508 +1,514 b''
1 1 # templatekw.py - common changeset template keywords
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 from .node import hex
11 11 from . import (
12 12 error,
13 13 hbisect,
14 14 patch,
15 15 scmutil,
16 16 util,
17 17 )
18 18
19 19 # This helper class allows us to handle both:
20 20 # "{files}" (legacy command-line-specific list hack) and
21 21 # "{files % '{file}\n'}" (hgweb-style with inlining and function support)
22 22 # and to access raw values:
23 23 # "{ifcontains(file, files, ...)}", "{ifcontains(key, extras, ...)}"
24 24 # "{get(extras, key)}"
25 25
26 26 class _hybrid(object):
27 27 def __init__(self, gen, values, makemap, joinfmt=None):
28 28 self.gen = gen
29 29 self.values = values
30 30 self._makemap = makemap
31 31 if joinfmt:
32 32 self.joinfmt = joinfmt
33 33 else:
34 34 self.joinfmt = lambda x: x.values()[0]
35 35 def __iter__(self):
36 36 return self.gen
37 37 def __call__(self):
38 38 makemap = self._makemap
39 39 for x in self.values:
40 40 yield makemap(x)
41 41 def __contains__(self, x):
42 42 return x in self.values
43 43 def __len__(self):
44 44 return len(self.values)
45 45 def __getattr__(self, name):
46 46 if name != 'get':
47 47 raise AttributeError(name)
48 48 return getattr(self.values, name)
49 49
50 50 def showlist(name, values, plural=None, element=None, separator=' ', **args):
51 51 if not element:
52 52 element = name
53 53 f = _showlist(name, values, plural, separator, **args)
54 54 return _hybrid(f, values, lambda x: {element: x})
55 55
56 56 def _showlist(name, values, plural=None, separator=' ', **args):
57 57 '''expand set of values.
58 58 name is name of key in template map.
59 59 values is list of strings or dicts.
60 60 plural is plural of name, if not simply name + 's'.
61 61 separator is used to join values as a string
62 62
63 63 expansion works like this, given name 'foo'.
64 64
65 65 if values is empty, expand 'no_foos'.
66 66
67 67 if 'foo' not in template map, return values as a string,
68 68 joined by 'separator'.
69 69
70 70 expand 'start_foos'.
71 71
72 72 for each value, expand 'foo'. if 'last_foo' in template
73 73 map, expand it instead of 'foo' for last key.
74 74
75 75 expand 'end_foos'.
76 76 '''
77 77 templ = args['templ']
78 78 if plural:
79 79 names = plural
80 80 else: names = name + 's'
81 81 if not values:
82 82 noname = 'no_' + names
83 83 if noname in templ:
84 84 yield templ(noname, **args)
85 85 return
86 86 if name not in templ:
87 87 if isinstance(values[0], str):
88 88 yield separator.join(values)
89 89 else:
90 90 for v in values:
91 91 yield dict(v, **args)
92 92 return
93 93 startname = 'start_' + names
94 94 if startname in templ:
95 95 yield templ(startname, **args)
96 96 vargs = args.copy()
97 97 def one(v, tag=name):
98 98 try:
99 99 vargs.update(v)
100 100 except (AttributeError, ValueError):
101 101 try:
102 102 for a, b in v:
103 103 vargs[a] = b
104 104 except ValueError:
105 105 vargs[name] = v
106 106 return templ(tag, **vargs)
107 107 lastname = 'last_' + name
108 108 if lastname in templ:
109 109 last = values.pop()
110 110 else:
111 111 last = None
112 112 for v in values:
113 113 yield one(v)
114 114 if last is not None:
115 115 yield one(last, tag=lastname)
116 116 endname = 'end_' + names
117 117 if endname in templ:
118 118 yield templ(endname, **args)
119 119
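A hedged sketch of how an extension might build a list keyword on top of showlist(): the 'lumps' keyword and its values are made up, and registration via the templatekw.keywords map mirrors the entries near the end of this file. Because showlist() wraps the result in _hybrid, the keyword supports both the "{lumps}" and the "{lumps % '{lump}\n'}" forms mentioned in the comment at the top of this file.

from mercurial import templatekw

def showlumps(**args):
    """:lumps: List of strings. Purely illustrative."""
    return templatekw.showlist('lump', ['a', 'b', 'c'], **args)

def extsetup(ui):
    templatekw.keywords['lumps'] = showlumps
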
120 120 def getfiles(repo, ctx, revcache):
121 121 if 'files' not in revcache:
122 122 revcache['files'] = repo.status(ctx.p1(), ctx)[:3]
123 123 return revcache['files']
124 124
125 125 def getlatesttags(repo, ctx, cache):
126 126 '''return date, distance and name for the latest tag of rev'''
127 127
128 128 if 'latesttags' not in cache:
129 129 # Cache mapping from rev to a tuple with tag date, tag
130 130 # distance and tag name
131 131 cache['latesttags'] = {-1: (0, 0, ['null'])}
132 132 latesttags = cache['latesttags']
133 133
134 134 rev = ctx.rev()
135 135 todo = [rev]
136 136 while todo:
137 137 rev = todo.pop()
138 138 if rev in latesttags:
139 139 continue
140 140 ctx = repo[rev]
141 141 tags = [t for t in ctx.tags()
142 142 if (repo.tagtype(t) and repo.tagtype(t) != 'local')]
143 143 if tags:
144 144 latesttags[rev] = ctx.date()[0], 0, [t for t in sorted(tags)]
145 145 continue
146 146 try:
147 147 # The tuples are laid out so the right one can be found by
148 148 # comparison.
149 149 pdate, pdist, ptag = max(
150 150 latesttags[p.rev()] for p in ctx.parents())
151 151 except KeyError:
152 152 # Cache miss - recurse
153 153 todo.append(rev)
154 154 todo.extend(p.rev() for p in ctx.parents())
155 155 continue
156 156 latesttags[rev] = pdate, pdist + 1, ptag
157 157 return latesttags[rev]
158 158
159 159 def getrenamedfn(repo, endrev=None):
160 160 rcache = {}
161 161 if endrev is None:
162 162 endrev = len(repo)
163 163
164 164 def getrenamed(fn, rev):
165 165 '''looks up all renames for a file (up to endrev) the first
166 166 time the file is given. It indexes on the changerev and only
167 167 parses the manifest if linkrev != changerev.
168 168 Returns rename info for fn at changerev rev.'''
169 169 if fn not in rcache:
170 170 rcache[fn] = {}
171 171 fl = repo.file(fn)
172 172 for i in fl:
173 173 lr = fl.linkrev(i)
174 174 renamed = fl.renamed(fl.node(i))
175 175 rcache[fn][lr] = renamed
176 176 if lr >= endrev:
177 177 break
178 178 if rev in rcache[fn]:
179 179 return rcache[fn][rev]
180 180
181 181 # If linkrev != rev (i.e. rev not found in rcache) fallback to
182 182 # filectx logic.
183 183 try:
184 184 return repo[rev][fn].renamed()
185 185 except error.LookupError:
186 186 return None
187 187
188 188 return getrenamed
189 189
190 190
191 191 def showauthor(repo, ctx, templ, **args):
192 192 """:author: String. The unmodified author of the changeset."""
193 193 return ctx.user()
194 194
195 195 def showbisect(repo, ctx, templ, **args):
196 196 """:bisect: String. The changeset bisection status."""
197 197 return hbisect.label(repo, ctx.node())
198 198
199 199 def showbranch(**args):
200 200 """:branch: String. The name of the branch on which the changeset was
201 201 committed.
202 202 """
203 203 return args['ctx'].branch()
204 204
205 205 def showbranches(**args):
206 206 """:branches: List of strings. The name of the branch on which the
207 207 changeset was committed. Will be empty if the branch name was
208 208 default.
209 209 """
210 210 branch = args['ctx'].branch()
211 211 if branch != 'default':
212 212 return showlist('branch', [branch], plural='branches', **args)
213 213 return showlist('branch', [], plural='branches', **args)
214 214
215 215 def showbookmarks(**args):
216 216 """:bookmarks: List of strings. Any bookmarks associated with the
217 217 changeset. Also sets 'active', the name of the active bookmark.
218 218 """
219 219 repo = args['ctx']._repo
220 220 bookmarks = args['ctx'].bookmarks()
221 221 active = repo._activebookmark
222 222 makemap = lambda v: {'bookmark': v, 'active': active, 'current': active}
223 223 f = _showlist('bookmark', bookmarks, **args)
224 224 return _hybrid(f, bookmarks, makemap, lambda x: x['bookmark'])
225 225
226 226 def showchildren(**args):
227 227 """:children: List of strings. The children of the changeset."""
228 228 ctx = args['ctx']
229 229 childrevs = ['%d:%s' % (cctx, cctx) for cctx in ctx.children()]
230 230 return showlist('children', childrevs, element='child', **args)
231 231
232 232 # Deprecated, but kept alive for help generation purposes.
233 233 def showcurrentbookmark(**args):
234 234 """:currentbookmark: String. The active bookmark, if it is
235 235 associated with the changeset (DEPRECATED)"""
236 236 return showactivebookmark(**args)
237 237
238 238 def showactivebookmark(**args):
239 239 """:activebookmark: String. The active bookmark, if it is
240 240 associated with the changeset"""
241 241 active = args['repo']._activebookmark
242 242 if active and active in args['ctx'].bookmarks():
243 243 return active
244 244 return ''
245 245
246 246 def showdate(repo, ctx, templ, **args):
247 247 """:date: Date information. The date when the changeset was committed."""
248 248 return ctx.date()
249 249
250 250 def showdescription(repo, ctx, templ, **args):
251 251 """:desc: String. The text of the changeset description."""
252 252 return ctx.description().strip()
253 253
254 254 def showdiffstat(repo, ctx, templ, **args):
255 255 """:diffstat: String. Statistics of changes with the following format:
256 256 "modified files: +added/-removed lines"
257 257 """
258 258 stats = patch.diffstatdata(util.iterlines(ctx.diff()))
259 259 maxname, maxtotal, adds, removes, binary = patch.diffstatsum(stats)
260 260 return '%s: +%s/-%s' % (len(stats), adds, removes)
261 261
262 262 def showextras(**args):
263 263 """:extras: List of dicts with key, value entries of the 'extras'
264 264 field of this changeset."""
265 265 extras = args['ctx'].extra()
266 266 extras = util.sortdict((k, extras[k]) for k in sorted(extras))
267 267 makemap = lambda k: {'key': k, 'value': extras[k]}
268 268 c = [makemap(k) for k in extras]
269 269 f = _showlist('extra', c, plural='extras', **args)
270 270 return _hybrid(f, extras, makemap,
271 271 lambda x: '%s=%s' % (x['key'], x['value']))
272 272
273 273 def showfileadds(**args):
274 274 """:file_adds: List of strings. Files added by this changeset."""
275 275 repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
276 276 return showlist('file_add', getfiles(repo, ctx, revcache)[1],
277 277 element='file', **args)
278 278
279 279 def showfilecopies(**args):
280 280 """:file_copies: List of strings. Files copied in this changeset with
281 281 their sources.
282 282 """
283 283 cache, ctx = args['cache'], args['ctx']
284 284 copies = args['revcache'].get('copies')
285 285 if copies is None:
286 286 if 'getrenamed' not in cache:
287 287 cache['getrenamed'] = getrenamedfn(args['repo'])
288 288 copies = []
289 289 getrenamed = cache['getrenamed']
290 290 for fn in ctx.files():
291 291 rename = getrenamed(fn, ctx.rev())
292 292 if rename:
293 293 copies.append((fn, rename[0]))
294 294
295 295 copies = util.sortdict(copies)
296 296 makemap = lambda k: {'name': k, 'source': copies[k]}
297 297 c = [makemap(k) for k in copies]
298 298 f = _showlist('file_copy', c, plural='file_copies', **args)
299 299 return _hybrid(f, copies, makemap,
300 300 lambda x: '%s (%s)' % (x['name'], x['source']))
301 301
302 302 # showfilecopiesswitch() displays file copies only if copy records are
303 303 # provided before calling the templater, usually with a --copies
304 304 # command line switch.
305 305 def showfilecopiesswitch(**args):
306 306 """:file_copies_switch: List of strings. Like "file_copies" but displayed
307 307 only if the --copied switch is set.
308 308 """
309 309 copies = args['revcache'].get('copies') or []
310 310 copies = util.sortdict(copies)
311 311 makemap = lambda k: {'name': k, 'source': copies[k]}
312 312 c = [makemap(k) for k in copies]
313 313 f = _showlist('file_copy', c, plural='file_copies', **args)
314 314 return _hybrid(f, copies, makemap,
315 315 lambda x: '%s (%s)' % (x['name'], x['source']))
316 316
317 317 def showfiledels(**args):
318 318 """:file_dels: List of strings. Files removed by this changeset."""
319 319 repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
320 320 return showlist('file_del', getfiles(repo, ctx, revcache)[2],
321 321 element='file', **args)
322 322
323 323 def showfilemods(**args):
324 324 """:file_mods: List of strings. Files modified by this changeset."""
325 325 repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
326 326 return showlist('file_mod', getfiles(repo, ctx, revcache)[0],
327 327 element='file', **args)
328 328
329 329 def showfiles(**args):
330 330 """:files: List of strings. All files modified, added, or removed by this
331 331 changeset.
332 332 """
333 333 return showlist('file', args['ctx'].files(), **args)
334 334
335 335 def showlatesttag(**args):
336 336 """:latesttag: List of strings. The global tags on the most recent globally
337 337 tagged ancestor of this changeset.
338 338 """
339 339 repo, ctx = args['repo'], args['ctx']
340 340 cache = args['cache']
341 341 latesttags = getlatesttags(repo, ctx, cache)[2]
342 342
343 343 return showlist('latesttag', latesttags, separator=':', **args)
344 344
345 345 def showlatesttagdistance(repo, ctx, templ, cache, **args):
346 346 """:latesttagdistance: Integer. Longest path to the latest tag."""
347 347 return getlatesttags(repo, ctx, cache)[1]
348 348
349 349 def showchangessincelatesttag(repo, ctx, templ, cache, **args):
350 350 """:changessincelatesttag: Integer. All ancestors not in the latest tag."""
351 351 latesttag = getlatesttags(repo, ctx, cache)[2][0]
352 352 offset = 0
353 353 revs = [ctx.rev()]
354 354
355 355 # The only() revset doesn't currently support wdir()
356 356 if ctx.rev() is None:
357 357 offset = 1
358 358 revs = [p.rev() for p in ctx.parents()]
359 359
360 360 return len(repo.revs('only(%ld, %s)', revs, latesttag)) + offset
361 361
362 362 def showmanifest(**args):
363 363 repo, ctx, templ = args['repo'], args['ctx'], args['templ']
364 364 mnode = ctx.manifestnode()
365 365 if mnode is None:
366 366 # just avoid crash, we might want to use the 'ff...' hash in future
367 367 return
368 368 args = args.copy()
369 369 args.update({'rev': repo.manifest.rev(mnode), 'node': hex(mnode)})
370 370 return templ('manifest', **args)
371 371
372 372 def shownode(repo, ctx, templ, **args):
373 373 """:node: String. The changeset identification hash, as a 40 hexadecimal
374 374 digit string.
375 375 """
376 376 return ctx.hex()
377 377
378 378 def showp1rev(repo, ctx, templ, **args):
379 379 """:p1rev: Integer. The repository-local revision number of the changeset's
380 380 first parent, or -1 if the changeset has no parents."""
381 381 return ctx.p1().rev()
382 382
383 383 def showp2rev(repo, ctx, templ, **args):
384 384 """:p2rev: Integer. The repository-local revision number of the changeset's
385 385 second parent, or -1 if the changeset has no second parent."""
386 386 return ctx.p2().rev()
387 387
388 388 def showp1node(repo, ctx, templ, **args):
389 389 """:p1node: String. The identification hash of the changeset's first parent,
390 390 as a 40 digit hexadecimal string. If the changeset has no parents, all
391 391 digits are 0."""
392 392 return ctx.p1().hex()
393 393
394 394 def showp2node(repo, ctx, templ, **args):
395 395 """:p2node: String. The identification hash of the changeset's second
396 396 parent, as a 40 digit hexadecimal string. If the changeset has no second
397 397 parent, all digits are 0."""
398 398 return ctx.p2().hex()
399 399
400 def _showparents(**args):
400 def showparents(**args):
401 401 """:parents: List of strings. The parents of the changeset in "rev:node"
402 402 format. If the changeset has only one "natural" parent (the predecessor
403 403 revision) nothing is shown."""
404 pass
404 repo = args['repo']
405 ctx = args['ctx']
406 parents = [[('rev', p.rev()),
407 ('node', p.hex()),
408 ('phase', p.phasestr())]
409 for p in scmutil.meaningfulparents(repo, ctx)]
410 return showlist('parent', parents, **args)
405 411
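Note that each parent reaches the templater as a list of ('rev', 'node', 'phase') pairs, which _showlist() above feeds into the 'parent' entry of the active template map. A hedged sketch of what such style-file entries could look like (only the entry names follow the keyword; the formatting is illustrative):

  changeset = 'changeset: {rev}:{node|short}\n{parents}'
  parent = 'parent:    {rev}:{node|short}\n'
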
406 412 def showphase(repo, ctx, templ, **args):
407 413 """:phase: String. The changeset phase name."""
408 414 return ctx.phasestr()
409 415
410 416 def showphaseidx(repo, ctx, templ, **args):
411 417 """:phaseidx: Integer. The changeset phase index."""
412 418 return ctx.phase()
413 419
414 420 def showrev(repo, ctx, templ, **args):
415 421 """:rev: Integer. The repository-local changeset revision number."""
416 422 return scmutil.intrev(ctx.rev())
417 423
418 424 def showrevslist(name, revs, **args):
419 425 """helper to generate a list of revisions in which a mapped template will
420 426 be evaluated"""
421 427 repo = args['ctx'].repo()
422 428 f = _showlist(name, revs, **args)
423 429 return _hybrid(f, revs,
424 430 lambda x: {name: x, 'ctx': repo[x], 'revcache': {}})
425 431
426 432 def showsubrepos(**args):
427 433 """:subrepos: List of strings. Updated subrepositories in the changeset."""
428 434 ctx = args['ctx']
429 435 substate = ctx.substate
430 436 if not substate:
431 437 return showlist('subrepo', [], **args)
432 438 psubstate = ctx.parents()[0].substate or {}
433 439 subrepos = []
434 440 for sub in substate:
435 441 if sub not in psubstate or substate[sub] != psubstate[sub]:
436 442 subrepos.append(sub) # modified or newly added in ctx
437 443 for sub in psubstate:
438 444 if sub not in substate:
439 445 subrepos.append(sub) # removed in ctx
440 446 return showlist('subrepo', sorted(subrepos), **args)
441 447
442 448 def shownames(namespace, **args):
443 449 """helper method to generate a template keyword for a namespace"""
444 450 ctx = args['ctx']
445 451 repo = ctx.repo()
446 452 ns = repo.names[namespace]
447 453 names = ns.names(repo, ctx.node())
448 454 return showlist(ns.templatename, names, plural=namespace, **args)
449 455
450 456 # don't remove "showtags" definition, even though namespaces will put
451 457 # a helper function for "tags" keyword into "keywords" map automatically,
452 458 # because online help text is built without namespaces initialization
453 459 def showtags(**args):
454 460 """:tags: List of strings. Any tags associated with the changeset."""
455 461 return shownames('tags', **args)
456 462
457 463 # keywords are callables like:
458 464 # fn(repo, ctx, templ, cache, revcache, **args)
459 465 # with:
460 466 # repo - current repository instance
461 467 # ctx - the changectx being displayed
462 468 # templ - the templater instance
463 469 # cache - a cache dictionary for the whole templater run
464 470 # revcache - a cache dictionary for the current revision
465 471 keywords = {
466 472 'activebookmark': showactivebookmark,
467 473 'author': showauthor,
468 474 'bisect': showbisect,
469 475 'branch': showbranch,
470 476 'branches': showbranches,
471 477 'bookmarks': showbookmarks,
472 478 'changessincelatesttag': showchangessincelatesttag,
473 479 'children': showchildren,
474 480 # currentbookmark is deprecated
475 481 'currentbookmark': showcurrentbookmark,
476 482 'date': showdate,
477 483 'desc': showdescription,
478 484 'diffstat': showdiffstat,
479 485 'extras': showextras,
480 486 'file_adds': showfileadds,
481 487 'file_copies': showfilecopies,
482 488 'file_copies_switch': showfilecopiesswitch,
483 489 'file_dels': showfiledels,
484 490 'file_mods': showfilemods,
485 491 'files': showfiles,
486 492 'latesttag': showlatesttag,
487 493 'latesttagdistance': showlatesttagdistance,
488 494 'manifest': showmanifest,
489 495 'node': shownode,
490 496 'p1rev': showp1rev,
491 497 'p1node': showp1node,
492 498 'p2rev': showp2rev,
493 499 'p2node': showp2node,
500 'parents': showparents,
494 501 'phase': showphase,
495 502 'phaseidx': showphaseidx,
496 503 'rev': showrev,
497 504 'subrepos': showsubrepos,
498 505 'tags': showtags,
499 506 }
500 507
501 508 dockeywords = {
502 'parents': _showparents,
503 509 }
504 510 dockeywords.update(keywords)
505 511 del dockeywords['branches']
506 512
507 513 # tell hggettext to extract docstrings from these functions:
508 514 i18nfunctions = dockeywords.values()