cmdutil: reimplement file wrapper that disables close()...
Author: Yuya Nishihara
Changeset: r27418:2ce4661a (branch: default)
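The diff below replaces the ad-hoc wrappedfileobj class defined inline in
makefileobj with a module-level _unclosablefile proxy: every attribute is
forwarded to the wrapped file, but close() becomes a no-op, so callers may
safely "close" a stream they do not own (such as ui.fout or ui.fin). A
minimal standalone sketch of the idea follows; the class body mirrors the
one added in the diff, while the surrounding usage is an editor
illustration, not part of cmdutil.py.

import io

class _unclosablefile(object):
    """Proxy that forwards everything to the wrapped file except close()."""
    def __init__(self, fp):
        self._fp = fp

    def close(self):
        pass  # deliberately a no-op: the caller does not own the stream

    def __iter__(self):
        return iter(self._fp)

    def __getattr__(self, attr):
        return getattr(self._fp, attr)

buf = io.StringIO()
fp = _unclosablefile(buf)
fp.write(u'# HG changeset patch\n')  # forwarded to the underlying buffer
fp.close()                           # no-op, the buffer stays usable
print(buf.getvalue())                # still readable after "close()"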
@@ -1,3418 +1,3421 @@
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import formatter
18 18 import crecord as crecordmod
19 19 import lock as lockmod
20 20
21 21 def ishunk(x):
22 22 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
23 23 return isinstance(x, hunkclasses)
24 24
25 25 def newandmodified(chunks, originalchunks):
26 26 newlyaddedandmodifiedfiles = set()
27 27 for chunk in chunks:
28 28 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
29 29 originalchunks:
30 30 newlyaddedandmodifiedfiles.add(chunk.header.filename())
31 31 return newlyaddedandmodifiedfiles
32 32
33 33 def parsealiases(cmd):
34 34 return cmd.lstrip("^").split("|")
35 35
36 36 def setupwrapcolorwrite(ui):
37 37 # wrap ui.write so diff output can be labeled/colorized
38 38 def wrapwrite(orig, *args, **kw):
39 39 label = kw.pop('label', '')
40 40 for chunk, l in patch.difflabel(lambda: args):
41 41 orig(chunk, label=label + l)
42 42
43 43 oldwrite = ui.write
44 44 def wrap(*args, **kwargs):
45 45 return wrapwrite(oldwrite, *args, **kwargs)
46 46 setattr(ui, 'write', wrap)
47 47 return oldwrite
48 48
49 49 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
50 50 if usecurses:
51 51 if testfile:
52 52 recordfn = crecordmod.testdecorator(testfile,
53 53 crecordmod.testchunkselector)
54 54 else:
55 55 recordfn = crecordmod.chunkselector
56 56
57 57 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
58 58
59 59 else:
60 60 return patch.filterpatch(ui, originalhunks, operation)
61 61
62 62 def recordfilter(ui, originalhunks, operation=None):
63 63 """ Prompts the user to filter the originalhunks and return a list of
64 64 selected hunks.
65 65 *operation* is used for ui purposes to indicate the user
66 66 what kind of filtering they are doing: reverting, committing, shelving, etc.
67 67 *operation* has to be a translated string.
68 68 """
69 69 usecurses = ui.configbool('experimental', 'crecord', False)
70 70 testfile = ui.config('experimental', 'crecordtest', None)
71 71 oldwrite = setupwrapcolorwrite(ui)
72 72 try:
73 73 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
74 74 testfile, operation)
75 75 finally:
76 76 ui.write = oldwrite
77 77 return newchunks, newopts
78 78
79 79 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
80 80 filterfn, *pats, **opts):
81 81 import merge as mergemod
82 82
83 83 if not ui.interactive():
84 84 if cmdsuggest:
85 85 msg = _('running non-interactively, use %s instead') % cmdsuggest
86 86 else:
87 87 msg = _('running non-interactively')
88 88 raise error.Abort(msg)
89 89
90 90 # make sure username is set before going interactive
91 91 if not opts.get('user'):
92 92 ui.username() # raise exception, username not provided
93 93
94 94 def recordfunc(ui, repo, message, match, opts):
95 95 """This is generic record driver.
96 96
97 97 Its job is to interactively filter local changes, and
98 98 accordingly prepare working directory into a state in which the
99 99 job can be delegated to a non-interactive commit command such as
100 100 'commit' or 'qrefresh'.
101 101
102 102 After the actual job is done by non-interactive command, the
103 103 working directory is restored to its original state.
104 104
105 105 In the end we'll record interesting changes, and everything else
106 106 will be left in place, so the user can continue working.
107 107 """
108 108
109 109 checkunfinished(repo, commit=True)
110 110 merge = len(repo[None].parents()) > 1
111 111 if merge:
112 112 raise error.Abort(_('cannot partially commit a merge '
113 113 '(use "hg commit" instead)'))
114 114
115 115 status = repo.status(match=match)
116 116 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
117 117 diffopts.nodates = True
118 118 diffopts.git = True
119 119 diffopts.showfunc = True
120 120 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
121 121 originalchunks = patch.parsepatch(originaldiff)
122 122
123 123 # 1. filter patch, so we have intending-to apply subset of it
124 124 try:
125 125 chunks, newopts = filterfn(ui, originalchunks)
126 126 except patch.PatchError as err:
127 127 raise error.Abort(_('error parsing patch: %s') % err)
128 128 opts.update(newopts)
129 129
130 130 # We need to keep a backup of files that have been newly added and
131 131 # modified during the recording process because there is a previous
132 132 # version without the edit in the workdir
133 133 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
134 134 contenders = set()
135 135 for h in chunks:
136 136 try:
137 137 contenders.update(set(h.files()))
138 138 except AttributeError:
139 139 pass
140 140
141 141 changed = status.modified + status.added + status.removed
142 142 newfiles = [f for f in changed if f in contenders]
143 143 if not newfiles:
144 144 ui.status(_('no changes to record\n'))
145 145 return 0
146 146
147 147 modified = set(status.modified)
148 148
149 149 # 2. backup changed files, so we can restore them in the end
150 150
151 151 if backupall:
152 152 tobackup = changed
153 153 else:
154 154 tobackup = [f for f in newfiles if f in modified or f in \
155 155 newlyaddedandmodifiedfiles]
156 156 backups = {}
157 157 if tobackup:
158 158 backupdir = repo.join('record-backups')
159 159 try:
160 160 os.mkdir(backupdir)
161 161 except OSError as err:
162 162 if err.errno != errno.EEXIST:
163 163 raise
164 164 try:
165 165 # backup continues
166 166 for f in tobackup:
167 167 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
168 168 dir=backupdir)
169 169 os.close(fd)
170 170 ui.debug('backup %r as %r\n' % (f, tmpname))
171 171 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
172 172 backups[f] = tmpname
173 173
174 174 fp = cStringIO.StringIO()
175 175 for c in chunks:
176 176 fname = c.filename()
177 177 if fname in backups:
178 178 c.write(fp)
179 179 dopatch = fp.tell()
180 180 fp.seek(0)
181 181
182 182 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
183 183 # 3a. apply filtered patch to clean repo (clean)
184 184 if backups:
185 185 # Equivalent to hg.revert
186 186 m = scmutil.matchfiles(repo, backups.keys())
187 187 mergemod.update(repo, repo.dirstate.p1(),
188 188 False, True, matcher=m)
189 189
190 190 # 3b. (apply)
191 191 if dopatch:
192 192 try:
193 193 ui.debug('applying patch\n')
194 194 ui.debug(fp.getvalue())
195 195 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
196 196 except patch.PatchError as err:
197 197 raise error.Abort(str(err))
198 198 del fp
199 199
200 200 # 4. We prepared working directory according to filtered
201 201 # patch. Now is the time to delegate the job to
202 202 # commit/qrefresh or the like!
203 203
204 204 # Make all of the pathnames absolute.
205 205 newfiles = [repo.wjoin(nf) for nf in newfiles]
206 206 return commitfunc(ui, repo, *newfiles, **opts)
207 207 finally:
208 208 # 5. finally restore backed-up files
209 209 try:
210 210 dirstate = repo.dirstate
211 211 for realname, tmpname in backups.iteritems():
212 212 ui.debug('restoring %r to %r\n' % (tmpname, realname))
213 213
214 214 if dirstate[realname] == 'n':
215 215 # without normallookup, restoring timestamp
216 216 # may cause partially committed files
217 217 # to be treated as unmodified
218 218 dirstate.normallookup(realname)
219 219
220 220 # copystat=True here and above are a hack to trick any
221 221 # editors that have f open that we haven't modified them.
222 222 #
223 223 # Also note that this racy as an editor could notice the
224 224 # file's mtime before we've finished writing it.
225 225 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
226 226 os.unlink(tmpname)
227 227 if tobackup:
228 228 os.rmdir(backupdir)
229 229 except OSError:
230 230 pass
231 231
232 232 def recordinwlock(ui, repo, message, match, opts):
233 233 wlock = repo.wlock()
234 234 try:
235 235 return recordfunc(ui, repo, message, match, opts)
236 236 finally:
237 237 wlock.release()
238 238
239 239 return commit(ui, repo, recordinwlock, pats, opts)
240 240
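The recordfunc driver above follows the numbered steps in its comments:
1. filter the patch interactively, 2. back up the files to be touched,
3. revert them to a clean state and apply only the selected hunks,
4. delegate the actual commit to commit/qrefresh, and 5. restore the
backups so unselected changes survive in the working directory. A
stripped-down sketch of that backup/apply/restore pattern in plain Python
follows (hypothetical helper, no Mercurial APIs; shutil.copy2 plays the
role of copyfile(..., copystat=True)).

import os, shutil, tempfile

def withbackups(workdir, files, applychange, commit):
    """Back up *files*, let *applychange* rewrite them, commit, then restore."""
    backupdir = tempfile.mkdtemp(prefix='record-backups-')
    backups = {}
    try:
        for f in files:                        # 2. backup changed files
            tmp = os.path.join(backupdir, f.replace('/', '_'))
            shutil.copy2(os.path.join(workdir, f), tmp)
            backups[f] = tmp
        applychange(workdir, files)            # 3. put selected hunks in place
        return commit(workdir, files)          # 4. delegate the real commit
    finally:
        for f, tmp in backups.items():         # 5. restore backed-up files
            shutil.copy2(tmp, os.path.join(workdir, f))
            os.unlink(tmp)
        os.rmdir(backupdir)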
241 241 def findpossible(cmd, table, strict=False):
242 242 """
243 243 Return cmd -> (aliases, command table entry)
244 244 for each matching command.
245 245 Return debug commands (or their aliases) only if no normal command matches.
246 246 """
247 247 choice = {}
248 248 debugchoice = {}
249 249
250 250 if cmd in table:
251 251 # short-circuit exact matches, "log" alias beats "^log|history"
252 252 keys = [cmd]
253 253 else:
254 254 keys = table.keys()
255 255
256 256 allcmds = []
257 257 for e in keys:
258 258 aliases = parsealiases(e)
259 259 allcmds.extend(aliases)
260 260 found = None
261 261 if cmd in aliases:
262 262 found = cmd
263 263 elif not strict:
264 264 for a in aliases:
265 265 if a.startswith(cmd):
266 266 found = a
267 267 break
268 268 if found is not None:
269 269 if aliases[0].startswith("debug") or found.startswith("debug"):
270 270 debugchoice[found] = (aliases, table[e])
271 271 else:
272 272 choice[found] = (aliases, table[e])
273 273
274 274 if not choice and debugchoice:
275 275 choice = debugchoice
276 276
277 277 return choice, allcmds
278 278
279 279 def findcmd(cmd, table, strict=True):
280 280 """Return (aliases, command table entry) for command string."""
281 281 choice, allcmds = findpossible(cmd, table, strict)
282 282
283 283 if cmd in choice:
284 284 return choice[cmd]
285 285
286 286 if len(choice) > 1:
287 287 clist = choice.keys()
288 288 clist.sort()
289 289 raise error.AmbiguousCommand(cmd, clist)
290 290
291 291 if choice:
292 292 return choice.values()[0]
293 293
294 294 raise error.UnknownCommand(cmd, allcmds)
295 295
296 296 def findrepo(p):
297 297 while not os.path.isdir(os.path.join(p, ".hg")):
298 298 oldp, p = p, os.path.dirname(p)
299 299 if p == oldp:
300 300 return None
301 301
302 302 return p
303 303
304 304 def bailifchanged(repo, merge=True):
305 305 if merge and repo.dirstate.p2() != nullid:
306 306 raise error.Abort(_('outstanding uncommitted merge'))
307 307 modified, added, removed, deleted = repo.status()[:4]
308 308 if modified or added or removed or deleted:
309 309 raise error.Abort(_('uncommitted changes'))
310 310 ctx = repo[None]
311 311 for s in sorted(ctx.substate):
312 312 ctx.sub(s).bailifchanged()
313 313
314 314 def logmessage(ui, opts):
315 315 """ get the log message according to -m and -l option """
316 316 message = opts.get('message')
317 317 logfile = opts.get('logfile')
318 318
319 319 if message and logfile:
320 320 raise error.Abort(_('options --message and --logfile are mutually '
321 321 'exclusive'))
322 322 if not message and logfile:
323 323 try:
324 324 if logfile == '-':
325 325 message = ui.fin.read()
326 326 else:
327 327 message = '\n'.join(util.readfile(logfile).splitlines())
328 328 except IOError as inst:
329 329 raise error.Abort(_("can't read commit message '%s': %s") %
330 330 (logfile, inst.strerror))
331 331 return message
332 332
333 333 def mergeeditform(ctxorbool, baseformname):
334 334 """return appropriate editform name (referencing a committemplate)
335 335
336 336 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
337 337 merging is committed.
338 338
339 339 This returns baseformname with '.merge' appended if it is a merge,
340 340 otherwise '.normal' is appended.
341 341 """
342 342 if isinstance(ctxorbool, bool):
343 343 if ctxorbool:
344 344 return baseformname + ".merge"
345 345 elif 1 < len(ctxorbool.parents()):
346 346 return baseformname + ".merge"
347 347
348 348 return baseformname + ".normal"
349 349
350 350 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
351 351 editform='', **opts):
352 352 """get appropriate commit message editor according to '--edit' option
353 353
354 354 'finishdesc' is a function to be called with edited commit message
355 355 (= 'description' of the new changeset) just after editing, but
356 356 before checking empty-ness. It should return actual text to be
357 357 stored into history. This allows to change description before
358 358 storing.
359 359
360 360 'extramsg' is a extra message to be shown in the editor instead of
361 361 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
362 362 is automatically added.
363 363
364 364 'editform' is a dot-separated list of names, to distinguish
365 365 the purpose of commit text editing.
366 366
367 367 'getcommiteditor' returns 'commitforceeditor' regardless of
368 368 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
369 369 they are specific for usage in MQ.
370 370 """
371 371 if edit or finishdesc or extramsg:
372 372 return lambda r, c, s: commitforceeditor(r, c, s,
373 373 finishdesc=finishdesc,
374 374 extramsg=extramsg,
375 375 editform=editform)
376 376 elif editform:
377 377 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
378 378 else:
379 379 return commiteditor
380 380
381 381 def loglimit(opts):
382 382 """get the log limit according to option -l/--limit"""
383 383 limit = opts.get('limit')
384 384 if limit:
385 385 try:
386 386 limit = int(limit)
387 387 except ValueError:
388 388 raise error.Abort(_('limit must be a positive integer'))
389 389 if limit <= 0:
390 390 raise error.Abort(_('limit must be positive'))
391 391 else:
392 392 limit = None
393 393 return limit
394 394
395 395 def makefilename(repo, pat, node, desc=None,
396 396 total=None, seqno=None, revwidth=None, pathname=None):
397 397 node_expander = {
398 398 'H': lambda: hex(node),
399 399 'R': lambda: str(repo.changelog.rev(node)),
400 400 'h': lambda: short(node),
401 401 'm': lambda: re.sub('[^\w]', '_', str(desc))
402 402 }
403 403 expander = {
404 404 '%': lambda: '%',
405 405 'b': lambda: os.path.basename(repo.root),
406 406 }
407 407
408 408 try:
409 409 if node:
410 410 expander.update(node_expander)
411 411 if node:
412 412 expander['r'] = (lambda:
413 413 str(repo.changelog.rev(node)).zfill(revwidth or 0))
414 414 if total is not None:
415 415 expander['N'] = lambda: str(total)
416 416 if seqno is not None:
417 417 expander['n'] = lambda: str(seqno)
418 418 if total is not None and seqno is not None:
419 419 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
420 420 if pathname is not None:
421 421 expander['s'] = lambda: os.path.basename(pathname)
422 422 expander['d'] = lambda: os.path.dirname(pathname) or '.'
423 423 expander['p'] = lambda: pathname
424 424
425 425 newname = []
426 426 patlen = len(pat)
427 427 i = 0
428 428 while i < patlen:
429 429 c = pat[i]
430 430 if c == '%':
431 431 i += 1
432 432 c = pat[i]
433 433 c = expander[c]()
434 434 newname.append(c)
435 435 i += 1
436 436 return ''.join(newname)
437 437 except KeyError as inst:
438 438 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
439 439 inst.args[0])
440 440
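makefilename above expands filename patterns such as the 'hg-%h.patch'
default used by export(): '%'-escapes are looked up in the expander table,
every other character is copied through, and an unknown escape aborts with
"invalid format spec". The same loop in isolation (editor illustration;
expand() and the table below are hypothetical, the '2ce4661a' value is this
changeset's short hash).

def expand(pat, table):
    out, i = [], 0
    while i < len(pat):
        c = pat[i]
        if c == '%':
            i += 1
            c = table[pat[i]]()  # KeyError here means an invalid format spec
        out.append(c)
        i += 1
    return ''.join(out)

table = {'%': lambda: '%', 'h': lambda: '2ce4661a'}
print(expand('hg-%h.patch', table))  # -> hg-2ce4661a.patch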
441 class _unclosablefile(object):
442 def __init__(self, fp):
443 self._fp = fp
444
445 def close(self):
446 pass
447
448 def __iter__(self):
449 return iter(self._fp)
450
451 def __getattr__(self, attr):
452 return getattr(self._fp, attr)
453
441 454 def makefileobj(repo, pat, node=None, desc=None, total=None,
442 455 seqno=None, revwidth=None, mode='wb', modemap=None,
443 456 pathname=None):
444 457
445 458 writable = mode not in ('r', 'rb')
446 459
447 460 if not pat or pat == '-':
448 461 if writable:
449 462 fp = repo.ui.fout
450 463 else:
451 464 fp = repo.ui.fin
452 465 if util.safehasattr(fp, 'fileno'):
453 466 return os.fdopen(os.dup(fp.fileno()), mode)
454 467 else:
455 468 # if this fp can't be duped properly, return
456 469 # a dummy object that can be closed
457 class wrappedfileobj(object):
458 noop = lambda x: None
459 def __init__(self, f):
460 self.f = f
461 def __getattr__(self, attr):
462 if attr == 'close':
463 return self.noop
464 else:
465 return getattr(self.f, attr)
466
467 return wrappedfileobj(fp)
470 return _unclosablefile(fp)
468 471 if util.safehasattr(pat, 'write') and writable:
469 472 return pat
470 473 if util.safehasattr(pat, 'read') and 'r' in mode:
471 474 return pat
472 475 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
473 476 if modemap is not None:
474 477 mode = modemap.get(fn, mode)
475 478 if mode == 'wb':
476 479 modemap[fn] = 'ab'
477 480 return open(fn, mode)
478 481
479 482 def openrevlog(repo, cmd, file_, opts):
480 483 """opens the changelog, manifest, a filelog or a given revlog"""
481 484 cl = opts['changelog']
482 485 mf = opts['manifest']
483 486 dir = opts['dir']
484 487 msg = None
485 488 if cl and mf:
486 489 msg = _('cannot specify --changelog and --manifest at the same time')
487 490 elif cl and dir:
488 491 msg = _('cannot specify --changelog and --dir at the same time')
489 492 elif cl or mf:
490 493 if file_:
491 494 msg = _('cannot specify filename with --changelog or --manifest')
492 495 elif not repo:
493 496 msg = _('cannot specify --changelog or --manifest or --dir '
494 497 'without a repository')
495 498 if msg:
496 499 raise error.Abort(msg)
497 500
498 501 r = None
499 502 if repo:
500 503 if cl:
501 504 r = repo.unfiltered().changelog
502 505 elif dir:
503 506 if 'treemanifest' not in repo.requirements:
504 507 raise error.Abort(_("--dir can only be used on repos with "
505 508 "treemanifest enabled"))
506 509 dirlog = repo.dirlog(file_)
507 510 if len(dirlog):
508 511 r = dirlog
509 512 elif mf:
510 513 r = repo.manifest
511 514 elif file_:
512 515 filelog = repo.file(file_)
513 516 if len(filelog):
514 517 r = filelog
515 518 if not r:
516 519 if not file_:
517 520 raise error.CommandError(cmd, _('invalid arguments'))
518 521 if not os.path.isfile(file_):
519 522 raise error.Abort(_("revlog '%s' not found") % file_)
520 523 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
521 524 file_[:-2] + ".i")
522 525 return r
523 526
524 527 def copy(ui, repo, pats, opts, rename=False):
525 528 # called with the repo lock held
526 529 #
527 530 # hgsep => pathname that uses "/" to separate directories
528 531 # ossep => pathname that uses os.sep to separate directories
529 532 cwd = repo.getcwd()
530 533 targets = {}
531 534 after = opts.get("after")
532 535 dryrun = opts.get("dry_run")
533 536 wctx = repo[None]
534 537
535 538 def walkpat(pat):
536 539 srcs = []
537 540 if after:
538 541 badstates = '?'
539 542 else:
540 543 badstates = '?r'
541 544 m = scmutil.match(repo[None], [pat], opts, globbed=True)
542 545 for abs in repo.walk(m):
543 546 state = repo.dirstate[abs]
544 547 rel = m.rel(abs)
545 548 exact = m.exact(abs)
546 549 if state in badstates:
547 550 if exact and state == '?':
548 551 ui.warn(_('%s: not copying - file is not managed\n') % rel)
549 552 if exact and state == 'r':
550 553 ui.warn(_('%s: not copying - file has been marked for'
551 554 ' remove\n') % rel)
552 555 continue
553 556 # abs: hgsep
554 557 # rel: ossep
555 558 srcs.append((abs, rel, exact))
556 559 return srcs
557 560
558 561 # abssrc: hgsep
559 562 # relsrc: ossep
560 563 # otarget: ossep
561 564 def copyfile(abssrc, relsrc, otarget, exact):
562 565 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
563 566 if '/' in abstarget:
564 567 # We cannot normalize abstarget itself, this would prevent
565 568 # case only renames, like a => A.
566 569 abspath, absname = abstarget.rsplit('/', 1)
567 570 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
568 571 reltarget = repo.pathto(abstarget, cwd)
569 572 target = repo.wjoin(abstarget)
570 573 src = repo.wjoin(abssrc)
571 574 state = repo.dirstate[abstarget]
572 575
573 576 scmutil.checkportable(ui, abstarget)
574 577
575 578 # check for collisions
576 579 prevsrc = targets.get(abstarget)
577 580 if prevsrc is not None:
578 581 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
579 582 (reltarget, repo.pathto(abssrc, cwd),
580 583 repo.pathto(prevsrc, cwd)))
581 584 return
582 585
583 586 # check for overwrites
584 587 exists = os.path.lexists(target)
585 588 samefile = False
586 589 if exists and abssrc != abstarget:
587 590 if (repo.dirstate.normalize(abssrc) ==
588 591 repo.dirstate.normalize(abstarget)):
589 592 if not rename:
590 593 ui.warn(_("%s: can't copy - same file\n") % reltarget)
591 594 return
592 595 exists = False
593 596 samefile = True
594 597
595 598 if not after and exists or after and state in 'mn':
596 599 if not opts['force']:
597 600 ui.warn(_('%s: not overwriting - file exists\n') %
598 601 reltarget)
599 602 return
600 603
601 604 if after:
602 605 if not exists:
603 606 if rename:
604 607 ui.warn(_('%s: not recording move - %s does not exist\n') %
605 608 (relsrc, reltarget))
606 609 else:
607 610 ui.warn(_('%s: not recording copy - %s does not exist\n') %
608 611 (relsrc, reltarget))
609 612 return
610 613 elif not dryrun:
611 614 try:
612 615 if exists:
613 616 os.unlink(target)
614 617 targetdir = os.path.dirname(target) or '.'
615 618 if not os.path.isdir(targetdir):
616 619 os.makedirs(targetdir)
617 620 if samefile:
618 621 tmp = target + "~hgrename"
619 622 os.rename(src, tmp)
620 623 os.rename(tmp, target)
621 624 else:
622 625 util.copyfile(src, target)
623 626 srcexists = True
624 627 except IOError as inst:
625 628 if inst.errno == errno.ENOENT:
626 629 ui.warn(_('%s: deleted in working directory\n') % relsrc)
627 630 srcexists = False
628 631 else:
629 632 ui.warn(_('%s: cannot copy - %s\n') %
630 633 (relsrc, inst.strerror))
631 634 return True # report a failure
632 635
633 636 if ui.verbose or not exact:
634 637 if rename:
635 638 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
636 639 else:
637 640 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
638 641
639 642 targets[abstarget] = abssrc
640 643
641 644 # fix up dirstate
642 645 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
643 646 dryrun=dryrun, cwd=cwd)
644 647 if rename and not dryrun:
645 648 if not after and srcexists and not samefile:
646 649 util.unlinkpath(repo.wjoin(abssrc))
647 650 wctx.forget([abssrc])
648 651
649 652 # pat: ossep
650 653 # dest ossep
651 654 # srcs: list of (hgsep, hgsep, ossep, bool)
652 655 # return: function that takes hgsep and returns ossep
653 656 def targetpathfn(pat, dest, srcs):
654 657 if os.path.isdir(pat):
655 658 abspfx = pathutil.canonpath(repo.root, cwd, pat)
656 659 abspfx = util.localpath(abspfx)
657 660 if destdirexists:
658 661 striplen = len(os.path.split(abspfx)[0])
659 662 else:
660 663 striplen = len(abspfx)
661 664 if striplen:
662 665 striplen += len(os.sep)
663 666 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
664 667 elif destdirexists:
665 668 res = lambda p: os.path.join(dest,
666 669 os.path.basename(util.localpath(p)))
667 670 else:
668 671 res = lambda p: dest
669 672 return res
670 673
671 674 # pat: ossep
672 675 # dest ossep
673 676 # srcs: list of (hgsep, hgsep, ossep, bool)
674 677 # return: function that takes hgsep and returns ossep
675 678 def targetpathafterfn(pat, dest, srcs):
676 679 if matchmod.patkind(pat):
677 680 # a mercurial pattern
678 681 res = lambda p: os.path.join(dest,
679 682 os.path.basename(util.localpath(p)))
680 683 else:
681 684 abspfx = pathutil.canonpath(repo.root, cwd, pat)
682 685 if len(abspfx) < len(srcs[0][0]):
683 686 # A directory. Either the target path contains the last
684 687 # component of the source path or it does not.
685 688 def evalpath(striplen):
686 689 score = 0
687 690 for s in srcs:
688 691 t = os.path.join(dest, util.localpath(s[0])[striplen:])
689 692 if os.path.lexists(t):
690 693 score += 1
691 694 return score
692 695
693 696 abspfx = util.localpath(abspfx)
694 697 striplen = len(abspfx)
695 698 if striplen:
696 699 striplen += len(os.sep)
697 700 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
698 701 score = evalpath(striplen)
699 702 striplen1 = len(os.path.split(abspfx)[0])
700 703 if striplen1:
701 704 striplen1 += len(os.sep)
702 705 if evalpath(striplen1) > score:
703 706 striplen = striplen1
704 707 res = lambda p: os.path.join(dest,
705 708 util.localpath(p)[striplen:])
706 709 else:
707 710 # a file
708 711 if destdirexists:
709 712 res = lambda p: os.path.join(dest,
710 713 os.path.basename(util.localpath(p)))
711 714 else:
712 715 res = lambda p: dest
713 716 return res
714 717
715 718 pats = scmutil.expandpats(pats)
716 719 if not pats:
717 720 raise error.Abort(_('no source or destination specified'))
718 721 if len(pats) == 1:
719 722 raise error.Abort(_('no destination specified'))
720 723 dest = pats.pop()
721 724 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
722 725 if not destdirexists:
723 726 if len(pats) > 1 or matchmod.patkind(pats[0]):
724 727 raise error.Abort(_('with multiple sources, destination must be an '
725 728 'existing directory'))
726 729 if util.endswithsep(dest):
727 730 raise error.Abort(_('destination %s is not a directory') % dest)
728 731
729 732 tfn = targetpathfn
730 733 if after:
731 734 tfn = targetpathafterfn
732 735 copylist = []
733 736 for pat in pats:
734 737 srcs = walkpat(pat)
735 738 if not srcs:
736 739 continue
737 740 copylist.append((tfn(pat, dest, srcs), srcs))
738 741 if not copylist:
739 742 raise error.Abort(_('no files to copy'))
740 743
741 744 errors = 0
742 745 for targetpath, srcs in copylist:
743 746 for abssrc, relsrc, exact in srcs:
744 747 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
745 748 errors += 1
746 749
747 750 if errors:
748 751 ui.warn(_('(consider using --after)\n'))
749 752
750 753 return errors != 0
751 754
752 755 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
753 756 runargs=None, appendpid=False):
754 757 '''Run a command as a service.'''
755 758
756 759 def writepid(pid):
757 760 if opts['pid_file']:
758 761 if appendpid:
759 762 mode = 'a'
760 763 else:
761 764 mode = 'w'
762 765 fp = open(opts['pid_file'], mode)
763 766 fp.write(str(pid) + '\n')
764 767 fp.close()
765 768
766 769 if opts['daemon'] and not opts['daemon_pipefds']:
767 770 # Signal child process startup with file removal
768 771 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
769 772 os.close(lockfd)
770 773 try:
771 774 if not runargs:
772 775 runargs = util.hgcmd() + sys.argv[1:]
773 776 runargs.append('--daemon-pipefds=%s' % lockpath)
774 777 # Don't pass --cwd to the child process, because we've already
775 778 # changed directory.
776 779 for i in xrange(1, len(runargs)):
777 780 if runargs[i].startswith('--cwd='):
778 781 del runargs[i]
779 782 break
780 783 elif runargs[i].startswith('--cwd'):
781 784 del runargs[i:i + 2]
782 785 break
783 786 def condfn():
784 787 return not os.path.exists(lockpath)
785 788 pid = util.rundetached(runargs, condfn)
786 789 if pid < 0:
787 790 raise error.Abort(_('child process failed to start'))
788 791 writepid(pid)
789 792 finally:
790 793 try:
791 794 os.unlink(lockpath)
792 795 except OSError as e:
793 796 if e.errno != errno.ENOENT:
794 797 raise
795 798 if parentfn:
796 799 return parentfn(pid)
797 800 else:
798 801 return
799 802
800 803 if initfn:
801 804 initfn()
802 805
803 806 if not opts['daemon']:
804 807 writepid(os.getpid())
805 808
806 809 if opts['daemon_pipefds']:
807 810 lockpath = opts['daemon_pipefds']
808 811 try:
809 812 os.setsid()
810 813 except AttributeError:
811 814 pass
812 815 os.unlink(lockpath)
813 816 util.hidewindow()
814 817 sys.stdout.flush()
815 818 sys.stderr.flush()
816 819
817 820 nullfd = os.open(os.devnull, os.O_RDWR)
818 821 logfilefd = nullfd
819 822 if logfile:
820 823 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
821 824 os.dup2(nullfd, 0)
822 825 os.dup2(logfilefd, 1)
823 826 os.dup2(logfilefd, 2)
824 827 if nullfd not in (0, 1, 2):
825 828 os.close(nullfd)
826 829 if logfile and logfilefd not in (0, 1, 2):
827 830 os.close(logfilefd)
828 831
829 832 if runfn:
830 833 return runfn()
831 834
832 835 ## facility to let extension process additional data into an import patch
833 836 # list of identifier to be executed in order
834 837 extrapreimport = [] # run before commit
835 838 extrapostimport = [] # run after commit
836 839 # mapping from identifier to actual import function
837 840 #
838 841 # 'preimport' are run before the commit is made and are provided the following
839 842 # arguments:
840 843 # - repo: the localrepository instance,
841 844 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
842 845 # - extra: the future extra dictionary of the changeset, please mutate it,
843 846 # - opts: the import options.
844 847 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
845 848 # mutation of in memory commit and more. Feel free to rework the code to get
846 849 # there.
847 850 extrapreimportmap = {}
848 851 # 'postimport' are run after the commit is made and are provided the following
849 852 # argument:
850 853 # - ctx: the changectx created by import.
851 854 extrapostimportmap = {}
852 855
853 856 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
854 857 """Utility function used by commands.import to import a single patch
855 858
856 859 This function is explicitly defined here to help the evolve extension to
857 860 wrap this part of the import logic.
858 861
859 862 The API is currently a bit ugly because it a simple code translation from
860 863 the import command. Feel free to make it better.
861 864
862 865 :hunk: a patch (as a binary string)
863 866 :parents: nodes that will be parent of the created commit
864 867 :opts: the full dict of option passed to the import command
865 868 :msgs: list to save commit message to.
866 869 (used in case we need to save it when failing)
867 870 :updatefunc: a function that update a repo to a given node
868 871 updatefunc(<repo>, <node>)
869 872 """
870 873 # avoid cycle context -> subrepo -> cmdutil
871 874 import context
872 875 extractdata = patch.extract(ui, hunk)
873 876 tmpname = extractdata.get('filename')
874 877 message = extractdata.get('message')
875 878 user = extractdata.get('user')
876 879 date = extractdata.get('date')
877 880 branch = extractdata.get('branch')
878 881 nodeid = extractdata.get('nodeid')
879 882 p1 = extractdata.get('p1')
880 883 p2 = extractdata.get('p2')
881 884
882 885 update = not opts.get('bypass')
883 886 strip = opts["strip"]
884 887 prefix = opts["prefix"]
885 888 sim = float(opts.get('similarity') or 0)
886 889 if not tmpname:
887 890 return (None, None, False)
888 891 msg = _('applied to working directory')
889 892
890 893 rejects = False
891 894
892 895 try:
893 896 cmdline_message = logmessage(ui, opts)
894 897 if cmdline_message:
895 898 # pickup the cmdline msg
896 899 message = cmdline_message
897 900 elif message:
898 901 # pickup the patch msg
899 902 message = message.strip()
900 903 else:
901 904 # launch the editor
902 905 message = None
903 906 ui.debug('message:\n%s\n' % message)
904 907
905 908 if len(parents) == 1:
906 909 parents.append(repo[nullid])
907 910 if opts.get('exact'):
908 911 if not nodeid or not p1:
909 912 raise error.Abort(_('not a Mercurial patch'))
910 913 p1 = repo[p1]
911 914 p2 = repo[p2 or nullid]
912 915 elif p2:
913 916 try:
914 917 p1 = repo[p1]
915 918 p2 = repo[p2]
916 919 # Without any options, consider p2 only if the
917 920 # patch is being applied on top of the recorded
918 921 # first parent.
919 922 if p1 != parents[0]:
920 923 p1 = parents[0]
921 924 p2 = repo[nullid]
922 925 except error.RepoError:
923 926 p1, p2 = parents
924 927 if p2.node() == nullid:
925 928 ui.warn(_("warning: import the patch as a normal revision\n"
926 929 "(use --exact to import the patch as a merge)\n"))
927 930 else:
928 931 p1, p2 = parents
929 932
930 933 n = None
931 934 if update:
932 935 if p1 != parents[0]:
933 936 updatefunc(repo, p1.node())
934 937 if p2 != parents[1]:
935 938 repo.setparents(p1.node(), p2.node())
936 939
937 940 if opts.get('exact') or opts.get('import_branch'):
938 941 repo.dirstate.setbranch(branch or 'default')
939 942
940 943 partial = opts.get('partial', False)
941 944 files = set()
942 945 try:
943 946 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
944 947 files=files, eolmode=None, similarity=sim / 100.0)
945 948 except patch.PatchError as e:
946 949 if not partial:
947 950 raise error.Abort(str(e))
948 951 if partial:
949 952 rejects = True
950 953
951 954 files = list(files)
952 955 if opts.get('no_commit'):
953 956 if message:
954 957 msgs.append(message)
955 958 else:
956 959 if opts.get('exact') or p2:
957 960 # If you got here, you either use --force and know what
958 961 # you are doing or used --exact or a merge patch while
959 962 # being updated to its first parent.
960 963 m = None
961 964 else:
962 965 m = scmutil.matchfiles(repo, files or [])
963 966 editform = mergeeditform(repo[None], 'import.normal')
964 967 if opts.get('exact'):
965 968 editor = None
966 969 else:
967 970 editor = getcommiteditor(editform=editform, **opts)
968 971 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
969 972 extra = {}
970 973 for idfunc in extrapreimport:
971 974 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
972 975 try:
973 976 if partial:
974 977 repo.ui.setconfig('ui', 'allowemptycommit', True)
975 978 n = repo.commit(message, opts.get('user') or user,
976 979 opts.get('date') or date, match=m,
977 980 editor=editor, extra=extra)
978 981 for idfunc in extrapostimport:
979 982 extrapostimportmap[idfunc](repo[n])
980 983 finally:
981 984 repo.ui.restoreconfig(allowemptyback)
982 985 else:
983 986 if opts.get('exact') or opts.get('import_branch'):
984 987 branch = branch or 'default'
985 988 else:
986 989 branch = p1.branch()
987 990 store = patch.filestore()
988 991 try:
989 992 files = set()
990 993 try:
991 994 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
992 995 files, eolmode=None)
993 996 except patch.PatchError as e:
994 997 raise error.Abort(str(e))
995 998 if opts.get('exact'):
996 999 editor = None
997 1000 else:
998 1001 editor = getcommiteditor(editform='import.bypass')
999 1002 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1000 1003 message,
1001 1004 opts.get('user') or user,
1002 1005 opts.get('date') or date,
1003 1006 branch, files, store,
1004 1007 editor=editor)
1005 1008 n = memctx.commit()
1006 1009 finally:
1007 1010 store.close()
1008 1011 if opts.get('exact') and opts.get('no_commit'):
1009 1012 # --exact with --no-commit is still useful in that it does merge
1010 1013 # and branch bits
1011 1014 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1012 1015 elif opts.get('exact') and hex(n) != nodeid:
1013 1016 raise error.Abort(_('patch is damaged or loses information'))
1014 1017 if n:
1015 1018 # i18n: refers to a short changeset id
1016 1019 msg = _('created %s') % short(n)
1017 1020 return (msg, n, rejects)
1018 1021 finally:
1019 1022 os.unlink(tmpname)
1020 1023
1021 1024 # facility to let extensions include additional data in an exported patch
1022 1025 # list of identifiers to be executed in order
1023 1026 extraexport = []
1024 1027 # mapping from identifier to actual export function
1025 1028 # function as to return a string to be added to the header or None
1026 1029 # it is given two arguments (sequencenumber, changectx)
1027 1030 extraexportmap = {}
1028 1031
1029 1032 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1030 1033 opts=None, match=None):
1031 1034 '''export changesets as hg patches.'''
1032 1035
1033 1036 total = len(revs)
1034 1037 revwidth = max([len(str(rev)) for rev in revs])
1035 1038 filemode = {}
1036 1039
1037 1040 def single(rev, seqno, fp):
1038 1041 ctx = repo[rev]
1039 1042 node = ctx.node()
1040 1043 parents = [p.node() for p in ctx.parents() if p]
1041 1044 branch = ctx.branch()
1042 1045 if switch_parent:
1043 1046 parents.reverse()
1044 1047
1045 1048 if parents:
1046 1049 prev = parents[0]
1047 1050 else:
1048 1051 prev = nullid
1049 1052
1050 1053 shouldclose = False
1051 1054 if not fp and len(template) > 0:
1052 1055 desc_lines = ctx.description().rstrip().split('\n')
1053 1056 desc = desc_lines[0] #Commit always has a first line.
1054 1057 fp = makefileobj(repo, template, node, desc=desc, total=total,
1055 1058 seqno=seqno, revwidth=revwidth, mode='wb',
1056 1059 modemap=filemode)
1057 1060 shouldclose = True
1058 1061 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1059 1062 repo.ui.note("%s\n" % fp.name)
1060 1063
1061 1064 if not fp:
1062 1065 write = repo.ui.write
1063 1066 else:
1064 1067 def write(s, **kw):
1065 1068 fp.write(s)
1066 1069
1067 1070 write("# HG changeset patch\n")
1068 1071 write("# User %s\n" % ctx.user())
1069 1072 write("# Date %d %d\n" % ctx.date())
1070 1073 write("# %s\n" % util.datestr(ctx.date()))
1071 1074 if branch and branch != 'default':
1072 1075 write("# Branch %s\n" % branch)
1073 1076 write("# Node ID %s\n" % hex(node))
1074 1077 write("# Parent %s\n" % hex(prev))
1075 1078 if len(parents) > 1:
1076 1079 write("# Parent %s\n" % hex(parents[1]))
1077 1080
1078 1081 for headerid in extraexport:
1079 1082 header = extraexportmap[headerid](seqno, ctx)
1080 1083 if header is not None:
1081 1084 write('# %s\n' % header)
1082 1085 write(ctx.description().rstrip())
1083 1086 write("\n\n")
1084 1087
1085 1088 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1086 1089 write(chunk, label=label)
1087 1090
1088 1091 if shouldclose:
1089 1092 fp.close()
1090 1093
1091 1094 for seqno, rev in enumerate(revs):
1092 1095 single(rev, seqno + 1, fp)
1093 1096
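For reference, the header that single() writes for an ordinary non-merge
changeset on the default branch has roughly this shape (placeholder values;
a "# Branch" line is added only for non-default branches and a second
"# Parent" line only for merges):

# HG changeset patch
# User Alice <alice@example.com>
# Date 0 0
# Thu Jan 01 00:00:00 1970 +0000
# Node ID <40-hex changeset id>
# Parent <40-hex parent id>

<commit description>

<diff chunks from patch.diffui()>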
1094 1097 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1095 1098 changes=None, stat=False, fp=None, prefix='',
1096 1099 root='', listsubrepos=False):
1097 1100 '''show diff or diffstat.'''
1098 1101 if fp is None:
1099 1102 write = ui.write
1100 1103 else:
1101 1104 def write(s, **kw):
1102 1105 fp.write(s)
1103 1106
1104 1107 if root:
1105 1108 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1106 1109 else:
1107 1110 relroot = ''
1108 1111 if relroot != '':
1109 1112 # XXX relative roots currently don't work if the root is within a
1110 1113 # subrepo
1111 1114 uirelroot = match.uipath(relroot)
1112 1115 relroot += '/'
1113 1116 for matchroot in match.files():
1114 1117 if not matchroot.startswith(relroot):
1115 1118 ui.warn(_('warning: %s not inside relative root %s\n') % (
1116 1119 match.uipath(matchroot), uirelroot))
1117 1120
1118 1121 if stat:
1119 1122 diffopts = diffopts.copy(context=0)
1120 1123 width = 80
1121 1124 if not ui.plain():
1122 1125 width = ui.termwidth()
1123 1126 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1124 1127 prefix=prefix, relroot=relroot)
1125 1128 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1126 1129 width=width,
1127 1130 git=diffopts.git):
1128 1131 write(chunk, label=label)
1129 1132 else:
1130 1133 for chunk, label in patch.diffui(repo, node1, node2, match,
1131 1134 changes, diffopts, prefix=prefix,
1132 1135 relroot=relroot):
1133 1136 write(chunk, label=label)
1134 1137
1135 1138 if listsubrepos:
1136 1139 ctx1 = repo[node1]
1137 1140 ctx2 = repo[node2]
1138 1141 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1139 1142 tempnode2 = node2
1140 1143 try:
1141 1144 if node2 is not None:
1142 1145 tempnode2 = ctx2.substate[subpath][1]
1143 1146 except KeyError:
1144 1147 # A subrepo that existed in node1 was deleted between node1 and
1145 1148 # node2 (inclusive). Thus, ctx2's substate won't contain that
1146 1149 # subpath. The best we can do is to ignore it.
1147 1150 tempnode2 = None
1148 1151 submatch = matchmod.narrowmatcher(subpath, match)
1149 1152 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1150 1153 stat=stat, fp=fp, prefix=prefix)
1151 1154
1152 1155 class changeset_printer(object):
1153 1156 '''show changeset information when templating not requested.'''
1154 1157
1155 1158 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1156 1159 self.ui = ui
1157 1160 self.repo = repo
1158 1161 self.buffered = buffered
1159 1162 self.matchfn = matchfn
1160 1163 self.diffopts = diffopts
1161 1164 self.header = {}
1162 1165 self.hunk = {}
1163 1166 self.lastheader = None
1164 1167 self.footer = None
1165 1168
1166 1169 def flush(self, ctx):
1167 1170 rev = ctx.rev()
1168 1171 if rev in self.header:
1169 1172 h = self.header[rev]
1170 1173 if h != self.lastheader:
1171 1174 self.lastheader = h
1172 1175 self.ui.write(h)
1173 1176 del self.header[rev]
1174 1177 if rev in self.hunk:
1175 1178 self.ui.write(self.hunk[rev])
1176 1179 del self.hunk[rev]
1177 1180 return 1
1178 1181 return 0
1179 1182
1180 1183 def close(self):
1181 1184 if self.footer:
1182 1185 self.ui.write(self.footer)
1183 1186
1184 1187 def show(self, ctx, copies=None, matchfn=None, **props):
1185 1188 if self.buffered:
1186 1189 self.ui.pushbuffer(labeled=True)
1187 1190 self._show(ctx, copies, matchfn, props)
1188 1191 self.hunk[ctx.rev()] = self.ui.popbuffer()
1189 1192 else:
1190 1193 self._show(ctx, copies, matchfn, props)
1191 1194
1192 1195 def _show(self, ctx, copies, matchfn, props):
1193 1196 '''show a single changeset or file revision'''
1194 1197 changenode = ctx.node()
1195 1198 rev = ctx.rev()
1196 1199 if self.ui.debugflag:
1197 1200 hexfunc = hex
1198 1201 else:
1199 1202 hexfunc = short
1200 1203 # as of now, wctx.node() and wctx.rev() return None, but we want to
1201 1204 # show the same values as {node} and {rev} templatekw
1202 1205 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1203 1206
1204 1207 if self.ui.quiet:
1205 1208 self.ui.write("%d:%s\n" % revnode, label='log.node')
1206 1209 return
1207 1210
1208 1211 date = util.datestr(ctx.date())
1209 1212
1210 1213 # i18n: column positioning for "hg log"
1211 1214 self.ui.write(_("changeset: %d:%s\n") % revnode,
1212 1215 label='log.changeset changeset.%s' % ctx.phasestr())
1213 1216
1214 1217 # branches are shown first before any other names due to backwards
1215 1218 # compatibility
1216 1219 branch = ctx.branch()
1217 1220 # don't show the default branch name
1218 1221 if branch != 'default':
1219 1222 # i18n: column positioning for "hg log"
1220 1223 self.ui.write(_("branch: %s\n") % branch,
1221 1224 label='log.branch')
1222 1225
1223 1226 for name, ns in self.repo.names.iteritems():
1224 1227 # branches has special logic already handled above, so here we just
1225 1228 # skip it
1226 1229 if name == 'branches':
1227 1230 continue
1228 1231 # we will use the templatename as the color name since those two
1229 1232 # should be the same
1230 1233 for name in ns.names(self.repo, changenode):
1231 1234 self.ui.write(ns.logfmt % name,
1232 1235 label='log.%s' % ns.colorname)
1233 1236 if self.ui.debugflag:
1234 1237 # i18n: column positioning for "hg log"
1235 1238 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1236 1239 label='log.phase')
1237 1240 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1238 1241 label = 'log.parent changeset.%s' % pctx.phasestr()
1239 1242 # i18n: column positioning for "hg log"
1240 1243 self.ui.write(_("parent: %d:%s\n")
1241 1244 % (pctx.rev(), hexfunc(pctx.node())),
1242 1245 label=label)
1243 1246
1244 1247 if self.ui.debugflag and rev is not None:
1245 1248 mnode = ctx.manifestnode()
1246 1249 # i18n: column positioning for "hg log"
1247 1250 self.ui.write(_("manifest: %d:%s\n") %
1248 1251 (self.repo.manifest.rev(mnode), hex(mnode)),
1249 1252 label='ui.debug log.manifest')
1250 1253 # i18n: column positioning for "hg log"
1251 1254 self.ui.write(_("user: %s\n") % ctx.user(),
1252 1255 label='log.user')
1253 1256 # i18n: column positioning for "hg log"
1254 1257 self.ui.write(_("date: %s\n") % date,
1255 1258 label='log.date')
1256 1259
1257 1260 if self.ui.debugflag:
1258 1261 files = ctx.p1().status(ctx)[:3]
1259 1262 for key, value in zip([# i18n: column positioning for "hg log"
1260 1263 _("files:"),
1261 1264 # i18n: column positioning for "hg log"
1262 1265 _("files+:"),
1263 1266 # i18n: column positioning for "hg log"
1264 1267 _("files-:")], files):
1265 1268 if value:
1266 1269 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1267 1270 label='ui.debug log.files')
1268 1271 elif ctx.files() and self.ui.verbose:
1269 1272 # i18n: column positioning for "hg log"
1270 1273 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1271 1274 label='ui.note log.files')
1272 1275 if copies and self.ui.verbose:
1273 1276 copies = ['%s (%s)' % c for c in copies]
1274 1277 # i18n: column positioning for "hg log"
1275 1278 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1276 1279 label='ui.note log.copies')
1277 1280
1278 1281 extra = ctx.extra()
1279 1282 if extra and self.ui.debugflag:
1280 1283 for key, value in sorted(extra.items()):
1281 1284 # i18n: column positioning for "hg log"
1282 1285 self.ui.write(_("extra: %s=%s\n")
1283 1286 % (key, value.encode('string_escape')),
1284 1287 label='ui.debug log.extra')
1285 1288
1286 1289 description = ctx.description().strip()
1287 1290 if description:
1288 1291 if self.ui.verbose:
1289 1292 self.ui.write(_("description:\n"),
1290 1293 label='ui.note log.description')
1291 1294 self.ui.write(description,
1292 1295 label='ui.note log.description')
1293 1296 self.ui.write("\n\n")
1294 1297 else:
1295 1298 # i18n: column positioning for "hg log"
1296 1299 self.ui.write(_("summary: %s\n") %
1297 1300 description.splitlines()[0],
1298 1301 label='log.summary')
1299 1302 self.ui.write("\n")
1300 1303
1301 1304 self.showpatch(ctx, matchfn)
1302 1305
1303 1306 def showpatch(self, ctx, matchfn):
1304 1307 if not matchfn:
1305 1308 matchfn = self.matchfn
1306 1309 if matchfn:
1307 1310 stat = self.diffopts.get('stat')
1308 1311 diff = self.diffopts.get('patch')
1309 1312 diffopts = patch.diffallopts(self.ui, self.diffopts)
1310 1313 node = ctx.node()
1311 1314 prev = ctx.p1()
1312 1315 if stat:
1313 1316 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1314 1317 match=matchfn, stat=True)
1315 1318 if diff:
1316 1319 if stat:
1317 1320 self.ui.write("\n")
1318 1321 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1319 1322 match=matchfn, stat=False)
1320 1323 self.ui.write("\n")
1321 1324
1322 1325 class jsonchangeset(changeset_printer):
1323 1326 '''format changeset information.'''
1324 1327
1325 1328 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1326 1329 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1327 1330 self.cache = {}
1328 1331 self._first = True
1329 1332
1330 1333 def close(self):
1331 1334 if not self._first:
1332 1335 self.ui.write("\n]\n")
1333 1336 else:
1334 1337 self.ui.write("[]\n")
1335 1338
1336 1339 def _show(self, ctx, copies, matchfn, props):
1337 1340 '''show a single changeset or file revision'''
1338 1341 rev = ctx.rev()
1339 1342 if rev is None:
1340 1343 jrev = jnode = 'null'
1341 1344 else:
1342 1345 jrev = str(rev)
1343 1346 jnode = '"%s"' % hex(ctx.node())
1344 1347 j = encoding.jsonescape
1345 1348
1346 1349 if self._first:
1347 1350 self.ui.write("[\n {")
1348 1351 self._first = False
1349 1352 else:
1350 1353 self.ui.write(",\n {")
1351 1354
1352 1355 if self.ui.quiet:
1353 1356 self.ui.write('\n "rev": %s' % jrev)
1354 1357 self.ui.write(',\n "node": %s' % jnode)
1355 1358 self.ui.write('\n }')
1356 1359 return
1357 1360
1358 1361 self.ui.write('\n "rev": %s' % jrev)
1359 1362 self.ui.write(',\n "node": %s' % jnode)
1360 1363 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1361 1364 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1362 1365 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1363 1366 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1364 1367 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1365 1368
1366 1369 self.ui.write(',\n "bookmarks": [%s]' %
1367 1370 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1368 1371 self.ui.write(',\n "tags": [%s]' %
1369 1372 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1370 1373 self.ui.write(',\n "parents": [%s]' %
1371 1374 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1372 1375
1373 1376 if self.ui.debugflag:
1374 1377 if rev is None:
1375 1378 jmanifestnode = 'null'
1376 1379 else:
1377 1380 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1378 1381 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1379 1382
1380 1383 self.ui.write(',\n "extra": {%s}' %
1381 1384 ", ".join('"%s": "%s"' % (j(k), j(v))
1382 1385 for k, v in ctx.extra().items()))
1383 1386
1384 1387 files = ctx.p1().status(ctx)
1385 1388 self.ui.write(',\n "modified": [%s]' %
1386 1389 ", ".join('"%s"' % j(f) for f in files[0]))
1387 1390 self.ui.write(',\n "added": [%s]' %
1388 1391 ", ".join('"%s"' % j(f) for f in files[1]))
1389 1392 self.ui.write(',\n "removed": [%s]' %
1390 1393 ", ".join('"%s"' % j(f) for f in files[2]))
1391 1394
1392 1395 elif self.ui.verbose:
1393 1396 self.ui.write(',\n "files": [%s]' %
1394 1397 ", ".join('"%s"' % j(f) for f in ctx.files()))
1395 1398
1396 1399 if copies:
1397 1400 self.ui.write(',\n "copies": {%s}' %
1398 1401 ", ".join('"%s": "%s"' % (j(k), j(v))
1399 1402 for k, v in copies))
1400 1403
1401 1404 matchfn = self.matchfn
1402 1405 if matchfn:
1403 1406 stat = self.diffopts.get('stat')
1404 1407 diff = self.diffopts.get('patch')
1405 1408 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1406 1409 node, prev = ctx.node(), ctx.p1().node()
1407 1410 if stat:
1408 1411 self.ui.pushbuffer()
1409 1412 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1410 1413 match=matchfn, stat=True)
1411 1414 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1412 1415 if diff:
1413 1416 self.ui.pushbuffer()
1414 1417 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1415 1418 match=matchfn, stat=False)
1416 1419 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1417 1420
1418 1421 self.ui.write("\n }")
1419 1422
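The jsonchangeset printer above renders each changeset as one element of a
JSON array; with default verbosity the fields appear in the order they are
written, roughly like this (placeholder values, editor illustration):

[
 {
  "rev": 0,
  "node": "<40-hex changeset id>",
  "branch": "default",
  "phase": "draft",
  "user": "Alice <alice@example.com>",
  "date": [0, 0],
  "desc": "initial commit",
  "bookmarks": [],
  "tags": ["tip"],
  "parents": ["<40-hex parent id>"]
 }
]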
1420 1423 class changeset_templater(changeset_printer):
1421 1424 '''format changeset information.'''
1422 1425
1423 1426 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1424 1427 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1425 1428 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1426 1429 defaulttempl = {
1427 1430 'parent': '{rev}:{node|formatnode} ',
1428 1431 'manifest': '{rev}:{node|formatnode}',
1429 1432 'file_copy': '{name} ({source})',
1430 1433 'extra': '{key}={value|stringescape}'
1431 1434 }
1432 1435 # filecopy is preserved for compatibility reasons
1433 1436 defaulttempl['filecopy'] = defaulttempl['file_copy']
1434 1437 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1435 1438 cache=defaulttempl)
1436 1439 if tmpl:
1437 1440 self.t.cache['changeset'] = tmpl
1438 1441
1439 1442 self.cache = {}
1440 1443
1441 1444 # find correct templates for current mode
1442 1445 tmplmodes = [
1443 1446 (True, None),
1444 1447 (self.ui.verbose, 'verbose'),
1445 1448 (self.ui.quiet, 'quiet'),
1446 1449 (self.ui.debugflag, 'debug'),
1447 1450 ]
1448 1451
1449 1452 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1450 1453 'docheader': '', 'docfooter': ''}
1451 1454 for mode, postfix in tmplmodes:
1452 1455 for t in self._parts:
1453 1456 cur = t
1454 1457 if postfix:
1455 1458 cur += "_" + postfix
1456 1459 if mode and cur in self.t:
1457 1460 self._parts[t] = cur
1458 1461
1459 1462 if self._parts['docheader']:
1460 1463 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1461 1464
1462 1465 def close(self):
1463 1466 if self._parts['docfooter']:
1464 1467 if not self.footer:
1465 1468 self.footer = ""
1466 1469 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1467 1470 return super(changeset_templater, self).close()
1468 1471
1469 1472 def _show(self, ctx, copies, matchfn, props):
1470 1473 '''show a single changeset or file revision'''
1471 1474 props = props.copy()
1472 1475 props.update(templatekw.keywords)
1473 1476 props['templ'] = self.t
1474 1477 props['ctx'] = ctx
1475 1478 props['repo'] = self.repo
1476 1479 props['revcache'] = {'copies': copies}
1477 1480 props['cache'] = self.cache
1478 1481
1479 1482 try:
1480 1483 # write header
1481 1484 if self._parts['header']:
1482 1485 h = templater.stringify(self.t(self._parts['header'], **props))
1483 1486 if self.buffered:
1484 1487 self.header[ctx.rev()] = h
1485 1488 else:
1486 1489 if self.lastheader != h:
1487 1490 self.lastheader = h
1488 1491 self.ui.write(h)
1489 1492
1490 1493 # write changeset metadata, then patch if requested
1491 1494 key = self._parts['changeset']
1492 1495 self.ui.write(templater.stringify(self.t(key, **props)))
1493 1496 self.showpatch(ctx, matchfn)
1494 1497
1495 1498 if self._parts['footer']:
1496 1499 if not self.footer:
1497 1500 self.footer = templater.stringify(
1498 1501 self.t(self._parts['footer'], **props))
1499 1502 except KeyError as inst:
1500 1503 msg = _("%s: no key named '%s'")
1501 1504 raise error.Abort(msg % (self.t.mapfile, inst.args[0]))
1502 1505 except SyntaxError as inst:
1503 1506 raise error.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1504 1507
1505 1508 def gettemplate(ui, tmpl, style):
1506 1509 """
1507 1510 Find the template matching the given template spec or style.
1508 1511 """
1509 1512
1510 1513 # ui settings
1511 1514 if not tmpl and not style: # template are stronger than style
1512 1515 tmpl = ui.config('ui', 'logtemplate')
1513 1516 if tmpl:
1514 1517 try:
1515 1518 tmpl = templater.unquotestring(tmpl)
1516 1519 except SyntaxError:
1517 1520 pass
1518 1521 return tmpl, None
1519 1522 else:
1520 1523 style = util.expandpath(ui.config('ui', 'style', ''))
1521 1524
1522 1525 if not tmpl and style:
1523 1526 mapfile = style
1524 1527 if not os.path.split(mapfile)[0]:
1525 1528 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1526 1529 or templater.templatepath(mapfile))
1527 1530 if mapname:
1528 1531 mapfile = mapname
1529 1532 return None, mapfile
1530 1533
1531 1534 if not tmpl:
1532 1535 return None, None
1533 1536
1534 1537 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1535 1538
1536 1539 def show_changeset(ui, repo, opts, buffered=False):
1537 1540 """show one changeset using template or regular display.
1538 1541
1539 1542 Display format will be the first non-empty hit of:
1540 1543 1. option 'template'
1541 1544 2. option 'style'
1542 1545 3. [ui] setting 'logtemplate'
1543 1546 4. [ui] setting 'style'
1544 1547 If all of these values are either the unset or the empty string,
1545 1548 regular display via changeset_printer() is done.
1546 1549 """
1547 1550 # options
1548 1551 matchfn = None
1549 1552 if opts.get('patch') or opts.get('stat'):
1550 1553 matchfn = scmutil.matchall(repo)
1551 1554
1552 1555 if opts.get('template') == 'json':
1553 1556 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1554 1557
1555 1558 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1556 1559
1557 1560 if not tmpl and not mapfile:
1558 1561 return changeset_printer(ui, repo, matchfn, opts, buffered)
1559 1562
1560 1563 try:
1561 1564 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1562 1565 buffered)
1563 1566 except SyntaxError as inst:
1564 1567 raise error.Abort(inst.args[0])
1565 1568 return t
1566 1569
1567 1570 def showmarker(ui, marker):
1568 1571 """utility function to display obsolescence marker in a readable way
1569 1572
1570 1573 To be used by debug function."""
1571 1574 ui.write(hex(marker.precnode()))
1572 1575 for repl in marker.succnodes():
1573 1576 ui.write(' ')
1574 1577 ui.write(hex(repl))
1575 1578 ui.write(' %X ' % marker.flags())
1576 1579 parents = marker.parentnodes()
1577 1580 if parents is not None:
1578 1581 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1579 1582 ui.write('(%s) ' % util.datestr(marker.date()))
1580 1583 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1581 1584 sorted(marker.metadata().items())
1582 1585 if t[0] != 'date')))
1583 1586 ui.write('\n')
1584 1587
1585 1588 def finddate(ui, repo, date):
1586 1589 """Find the tipmost changeset that matches the given date spec"""
1587 1590
1588 1591 df = util.matchdate(date)
1589 1592 m = scmutil.matchall(repo)
1590 1593 results = {}
1591 1594
1592 1595 def prep(ctx, fns):
1593 1596 d = ctx.date()
1594 1597 if df(d[0]):
1595 1598 results[ctx.rev()] = d
1596 1599
1597 1600 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1598 1601 rev = ctx.rev()
1599 1602 if rev in results:
1600 1603 ui.status(_("found revision %s from %s\n") %
1601 1604 (rev, util.datestr(results[rev])))
1602 1605 return str(rev)
1603 1606
1604 1607 raise error.Abort(_("revision matching date not found"))
1605 1608
1606 1609 def increasingwindows(windowsize=8, sizelimit=512):
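    """Yield an infinite sequence of window sizes, doubling from
    `windowsize` until `sizelimit` is reached and then repeating the
    last size forever."""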
1607 1610 while True:
1608 1611 yield windowsize
1609 1612 if windowsize < sizelimit:
1610 1613 windowsize *= 2
1611 1614
1612 1615 class FileWalkError(Exception):
1613 1616 pass
1614 1617
1615 1618 def walkfilerevs(repo, match, follow, revs, fncache):
1616 1619 '''Walks the file history for the matched files.
1617 1620
1618 1621 Returns the changeset revs that are involved in the file history.
1619 1622
1620 1623 Throws FileWalkError if the file history can't be walked using
1621 1624 filelogs alone.
1622 1625 '''
1623 1626 wanted = set()
1624 1627 copies = []
1625 1628 minrev, maxrev = min(revs), max(revs)
1626 1629 def filerevgen(filelog, last):
1627 1630 """
1628 1631 Only files, no patterns. Check the history of each file.
1629 1632
1630 1633 Examines filelog entries within minrev, maxrev linkrev range
1631 1634 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1632 1635 tuples in backwards order
1633 1636 """
1634 1637 cl_count = len(repo)
1635 1638 revs = []
1636 1639 for j in xrange(0, last + 1):
1637 1640 linkrev = filelog.linkrev(j)
1638 1641 if linkrev < minrev:
1639 1642 continue
1640 1643 # only yield rev for which we have the changelog, it can
1641 1644 # happen while doing "hg log" during a pull or commit
1642 1645 if linkrev >= cl_count:
1643 1646 break
1644 1647
1645 1648 parentlinkrevs = []
1646 1649 for p in filelog.parentrevs(j):
1647 1650 if p != nullrev:
1648 1651 parentlinkrevs.append(filelog.linkrev(p))
1649 1652 n = filelog.node(j)
1650 1653 revs.append((linkrev, parentlinkrevs,
1651 1654 follow and filelog.renamed(n)))
1652 1655
1653 1656 return reversed(revs)
1654 1657 def iterfiles():
1655 1658 pctx = repo['.']
1656 1659 for filename in match.files():
1657 1660 if follow:
1658 1661 if filename not in pctx:
1659 1662 raise error.Abort(_('cannot follow file not in parent '
1660 1663 'revision: "%s"') % filename)
1661 1664 yield filename, pctx[filename].filenode()
1662 1665 else:
1663 1666 yield filename, None
1664 1667 for filename_node in copies:
1665 1668 yield filename_node
1666 1669
1667 1670 for file_, node in iterfiles():
1668 1671 filelog = repo.file(file_)
1669 1672 if not len(filelog):
1670 1673 if node is None:
1671 1674 # A zero count may be a directory or deleted file, so
1672 1675 # try to find matching entries on the slow path.
1673 1676 if follow:
1674 1677 raise error.Abort(
1675 1678 _('cannot follow nonexistent file: "%s"') % file_)
1676 1679 raise FileWalkError("Cannot walk via filelog")
1677 1680 else:
1678 1681 continue
1679 1682
1680 1683 if node is None:
1681 1684 last = len(filelog) - 1
1682 1685 else:
1683 1686 last = filelog.rev(node)
1684 1687
1685 1688 # keep track of all ancestors of the file
1686 1689 ancestors = set([filelog.linkrev(last)])
1687 1690
1688 1691 # iterate from latest to oldest revision
1689 1692 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1690 1693 if not follow:
1691 1694 if rev > maxrev:
1692 1695 continue
1693 1696 else:
1694 1697 # Note that last might not be the first interesting
1695 1698 # rev to us:
1696 1699 # if the file has been changed after maxrev, we'll
1697 1700 # have linkrev(last) > maxrev, and we still need
1698 1701 # to explore the file graph
1699 1702 if rev not in ancestors:
1700 1703 continue
1701 1704 # XXX insert 1327 fix here
1702 1705 if flparentlinkrevs:
1703 1706 ancestors.update(flparentlinkrevs)
1704 1707
1705 1708 fncache.setdefault(rev, []).append(file_)
1706 1709 wanted.add(rev)
1707 1710 if copied:
1708 1711 copies.append(copied)
1709 1712
1710 1713 return wanted
1711 1714
1712 1715 class _followfilter(object):
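    """Incrementally decide whether revisions are related to the first
    revision passed to match(): descendants when revisions are fed in
    increasing order, ancestors when fed in decreasing order (first
    parents only if onlyfirst is set)."""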
1713 1716 def __init__(self, repo, onlyfirst=False):
1714 1717 self.repo = repo
1715 1718 self.startrev = nullrev
1716 1719 self.roots = set()
1717 1720 self.onlyfirst = onlyfirst
1718 1721
1719 1722 def match(self, rev):
1720 1723 def realparents(rev):
1721 1724 if self.onlyfirst:
1722 1725 return self.repo.changelog.parentrevs(rev)[0:1]
1723 1726 else:
1724 1727 return filter(lambda x: x != nullrev,
1725 1728 self.repo.changelog.parentrevs(rev))
1726 1729
1727 1730 if self.startrev == nullrev:
1728 1731 self.startrev = rev
1729 1732 return True
1730 1733
1731 1734 if rev > self.startrev:
1732 1735 # forward: all descendants
1733 1736 if not self.roots:
1734 1737 self.roots.add(self.startrev)
1735 1738 for parent in realparents(rev):
1736 1739 if parent in self.roots:
1737 1740 self.roots.add(rev)
1738 1741 return True
1739 1742 else:
1740 1743 # backwards: all parents
1741 1744 if not self.roots:
1742 1745 self.roots.update(realparents(self.startrev))
1743 1746 if rev in self.roots:
1744 1747 self.roots.remove(rev)
1745 1748 self.roots.update(realparents(rev))
1746 1749 return True
1747 1750
1748 1751 return False
1749 1752
1750 1753 def walkchangerevs(repo, match, opts, prepare):
1751 1754 '''Iterate over files and the revs in which they changed.
1752 1755
1753 1756 Callers most commonly need to iterate backwards over the history
1754 1757 in which they are interested. Doing so has awful (quadratic-looking)
1755 1758 performance, so we use iterators in a "windowed" way.
1756 1759
1757 1760 We walk a window of revisions in the desired order. Within the
1758 1761 window, we first walk forwards to gather data, then in the desired
1759 1762 order (usually backwards) to display it.
1760 1763
1761 1764 This function returns an iterator yielding contexts. Before
1762 1765 yielding each context, the iterator will first call the prepare
1763 1766 function on each context in the window in forward order.'''
1764 1767
1765 1768 follow = opts.get('follow') or opts.get('follow_first')
1766 1769 revs = _logrevs(repo, opts)
1767 1770 if not revs:
1768 1771 return []
1769 1772 wanted = set()
1770 1773 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1771 1774 opts.get('removed'))
1772 1775 fncache = {}
1773 1776 change = repo.changectx
1774 1777
1775 1778 # First step is to fill wanted, the set of revisions that we want to yield.
1776 1779 # When it does not induce extra cost, we also fill fncache for revisions in
1777 1780 # wanted: a cache of filenames that were changed (ctx.files()) and that
1778 1781 # match the file filtering conditions.
1779 1782
1780 1783 if match.always():
1781 1784 # No files, no patterns. Display all revs.
1782 1785 wanted = revs
1783 1786 elif not slowpath:
1784 1787 # We only have to read through the filelog to find wanted revisions
1785 1788
1786 1789 try:
1787 1790 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1788 1791 except FileWalkError:
1789 1792 slowpath = True
1790 1793
1791 1794 # We decided to fall back to the slowpath because at least one
1792 1795 # of the paths was not a file. Check to see if at least one of them
1793 1796 # existed in history, otherwise simply return
1794 1797 for path in match.files():
1795 1798 if path == '.' or path in repo.store:
1796 1799 break
1797 1800 else:
1798 1801 return []
1799 1802
1800 1803 if slowpath:
1801 1804 # We have to read the changelog to match filenames against
1802 1805 # changed files
1803 1806
1804 1807 if follow:
1805 1808 raise error.Abort(_('can only follow copies/renames for explicit '
1806 1809 'filenames'))
1807 1810
1808 1811 # The slow path checks files modified in every changeset.
1809 1812 # This is really slow on large repos, so compute the set lazily.
1810 1813 class lazywantedset(object):
1811 1814 def __init__(self):
1812 1815 self.set = set()
1813 1816 self.revs = set(revs)
1814 1817
1815 1818 # No need to worry about locality here because it will be accessed
1816 1819 # in the same order as the increasing window below.
1817 1820 def __contains__(self, value):
1818 1821 if value in self.set:
1819 1822 return True
1820 1823 elif value not in self.revs:
1821 1824 return False
1822 1825 else:
1823 1826 self.revs.discard(value)
1824 1827 ctx = change(value)
1825 1828 matches = filter(match, ctx.files())
1826 1829 if matches:
1827 1830 fncache[value] = matches
1828 1831 self.set.add(value)
1829 1832 return True
1830 1833 return False
1831 1834
1832 1835 def discard(self, value):
1833 1836 self.revs.discard(value)
1834 1837 self.set.discard(value)
1835 1838
1836 1839 wanted = lazywantedset()
1837 1840
1838 1841 # it might be worthwhile to do this in the iterator if the rev range
1839 1842 # is descending and the prune args are all within that range
1840 1843 for rev in opts.get('prune', ()):
1841 1844 rev = repo[rev].rev()
1842 1845 ff = _followfilter(repo)
1843 1846 stop = min(revs[0], revs[-1])
1844 1847 for x in xrange(rev, stop - 1, -1):
1845 1848 if ff.match(x):
1846 1849 wanted = wanted - [x]
1847 1850
1848 1851 # Now that wanted is correctly initialized, we can iterate over the
1849 1852 # revision range, yielding only revisions in wanted.
1850 1853 def iterate():
1851 1854 if follow and match.always():
1852 1855 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1853 1856 def want(rev):
1854 1857 return ff.match(rev) and rev in wanted
1855 1858 else:
1856 1859 def want(rev):
1857 1860 return rev in wanted
1858 1861
1859 1862 it = iter(revs)
1860 1863 stopiteration = False
1861 1864 for windowsize in increasingwindows():
1862 1865 nrevs = []
1863 1866 for i in xrange(windowsize):
1864 1867 rev = next(it, None)
1865 1868 if rev is None:
1866 1869 stopiteration = True
1867 1870 break
1868 1871 elif want(rev):
1869 1872 nrevs.append(rev)
1870 1873 for rev in sorted(nrevs):
1871 1874 fns = fncache.get(rev)
1872 1875 ctx = change(rev)
1873 1876 if not fns:
1874 1877 def fns_generator():
1875 1878 for f in ctx.files():
1876 1879 if match(f):
1877 1880 yield f
1878 1881 fns = fns_generator()
1879 1882 prepare(ctx, fns)
1880 1883 for rev in nrevs:
1881 1884 yield change(rev)
1882 1885
1883 1886 if stopiteration:
1884 1887 break
1885 1888
1886 1889 return iterate()
1887 1890
1888 1891 def _makefollowlogfilematcher(repo, files, followfirst):
1889 1892 # When displaying a revision with --patch --follow FILE, we have
1890 1893 # to know which file of the revision must be diffed. With
1891 1894 # --follow, we want the names of the ancestors of FILE in the
1892 1895 # revision, stored in "fcache". "fcache" is populated by
1893 1896 # reproducing the graph traversal already done by --follow revset
1894 1897 # and relating linkrevs to file names (which is not "correct" but
1895 1898 # good enough).
1896 1899 fcache = {}
1897 1900 fcacheready = [False]
1898 1901 pctx = repo['.']
1899 1902
1900 1903 def populate():
1901 1904 for fn in files:
1902 1905 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1903 1906 for c in i:
1904 1907 fcache.setdefault(c.linkrev(), set()).add(c.path())
1905 1908
1906 1909 def filematcher(rev):
1907 1910 if not fcacheready[0]:
1908 1911 # Lazy initialization
1909 1912 fcacheready[0] = True
1910 1913 populate()
1911 1914 return scmutil.matchfiles(repo, fcache.get(rev, []))
1912 1915
1913 1916 return filematcher
1914 1917
1915 1918 def _makenofollowlogfilematcher(repo, pats, opts):
1916 1919 '''hook for extensions to override the filematcher for non-follow cases'''
1917 1920 return None
1918 1921
1919 1922 def _makelogrevset(repo, pats, opts, revs):
1920 1923 """Return (expr, filematcher) where expr is a revset string built
1921 1924 from log options and file patterns or None. If --stat or --patch
1922 1925 are not passed filematcher is None. Otherwise it is a callable
1923 1926 taking a revision number and returning a match object filtering
1924 1927 the files to be detailed when displaying the revision.
1925 1928 """
1926 1929 opt2revset = {
1927 1930 'no_merges': ('not merge()', None),
1928 1931 'only_merges': ('merge()', None),
1929 1932 '_ancestors': ('ancestors(%(val)s)', None),
1930 1933 '_fancestors': ('_firstancestors(%(val)s)', None),
1931 1934 '_descendants': ('descendants(%(val)s)', None),
1932 1935 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1933 1936 '_matchfiles': ('_matchfiles(%(val)s)', None),
1934 1937 'date': ('date(%(val)r)', None),
1935 1938 'branch': ('branch(%(val)r)', ' or '),
1936 1939 '_patslog': ('filelog(%(val)r)', ' or '),
1937 1940 '_patsfollow': ('follow(%(val)r)', ' or '),
1938 1941 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1939 1942 'keyword': ('keyword(%(val)r)', ' or '),
1940 1943 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1941 1944 'user': ('user(%(val)r)', ' or '),
1942 1945 }
1943 1946
1944 1947 opts = dict(opts)
1945 1948 # follow or not follow?
1946 1949 follow = opts.get('follow') or opts.get('follow_first')
1947 1950 if opts.get('follow_first'):
1948 1951 followfirst = 1
1949 1952 else:
1950 1953 followfirst = 0
1951 1954 # --follow with FILE behavior depends on revs...
1952 1955 it = iter(revs)
1953 1956 startrev = it.next()
1954 1957 followdescendants = startrev < next(it, startrev)
1955 1958
1956 1959 # branch and only_branch are really aliases and must be handled at
1957 1960 # the same time
1958 1961 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1959 1962 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1960 1963 # pats/include/exclude are passed to match.match() directly in
1961 1964 # _matchfiles() revset but walkchangerevs() builds its matcher with
1962 1965 # scmutil.match(). The difference is input pats are globbed on
1963 1966 # platforms without shell expansion (windows).
1964 1967 wctx = repo[None]
1965 1968 match, pats = scmutil.matchandpats(wctx, pats, opts)
1966 1969 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1967 1970 opts.get('removed'))
1968 1971 if not slowpath:
1969 1972 for f in match.files():
1970 1973 if follow and f not in wctx:
1971 1974 # If the file exists, it may be a directory, so let it
1972 1975 # take the slow path.
1973 1976 if os.path.exists(repo.wjoin(f)):
1974 1977 slowpath = True
1975 1978 continue
1976 1979 else:
1977 1980 raise error.Abort(_('cannot follow file not in parent '
1978 1981 'revision: "%s"') % f)
1979 1982 filelog = repo.file(f)
1980 1983 if not filelog:
1981 1984 # A zero count may be a directory or deleted file, so
1982 1985 # try to find matching entries on the slow path.
1983 1986 if follow:
1984 1987 raise error.Abort(
1985 1988 _('cannot follow nonexistent file: "%s"') % f)
1986 1989 slowpath = True
1987 1990
1988 1991 # We decided to fall back to the slowpath because at least one
1989 1992 # of the paths was not a file. Check to see if at least one of them
1990 1993 # existed in history - in that case, we'll continue down the
1991 1994 # slowpath; otherwise, we can turn off the slowpath
1992 1995 if slowpath:
1993 1996 for path in match.files():
1994 1997 if path == '.' or path in repo.store:
1995 1998 break
1996 1999 else:
1997 2000 slowpath = False
1998 2001
1999 2002 fpats = ('_patsfollow', '_patsfollowfirst')
2000 2003 fnopats = (('_ancestors', '_fancestors'),
2001 2004 ('_descendants', '_fdescendants'))
2002 2005 if slowpath:
2003 2006 # See walkchangerevs() slow path.
2004 2007 #
2005 2008 # pats/include/exclude cannot be represented as separate
2006 2009 # revset expressions as their filtering logic applies at file
2007 2010 # level. For instance "-I a -X a" matches a revision touching
2008 2011 # "a" and "b" while "file(a) and not file(b)" does
2009 2012 # not. Besides, filesets are evaluated against the working
2010 2013 # directory.
2011 2014 matchargs = ['r:', 'd:relpath']
2012 2015 for p in pats:
2013 2016 matchargs.append('p:' + p)
2014 2017 for p in opts.get('include', []):
2015 2018 matchargs.append('i:' + p)
2016 2019 for p in opts.get('exclude', []):
2017 2020 matchargs.append('x:' + p)
2018 2021 matchargs = ','.join(('%r' % p) for p in matchargs)
2019 2022 opts['_matchfiles'] = matchargs
2020 2023 if follow:
2021 2024 opts[fnopats[0][followfirst]] = '.'
2022 2025 else:
2023 2026 if follow:
2024 2027 if pats:
2025 2028 # follow() revset interprets its file argument as a
2026 2029 # manifest entry, so use match.files(), not pats.
2027 2030 opts[fpats[followfirst]] = list(match.files())
2028 2031 else:
2029 2032 op = fnopats[followdescendants][followfirst]
2030 2033 opts[op] = 'rev(%d)' % startrev
2031 2034 else:
2032 2035 opts['_patslog'] = list(pats)
2033 2036
2034 2037 filematcher = None
2035 2038 if opts.get('patch') or opts.get('stat'):
2036 2039 # When following files, track renames via a special matcher.
2037 2040 # If we're forced to take the slowpath it means we're following
2038 2041 # at least one pattern/directory, so don't bother with rename tracking.
2039 2042 if follow and not match.always() and not slowpath:
2040 2043 # _makefollowlogfilematcher expects its files argument to be
2041 2044 # relative to the repo root, so use match.files(), not pats.
2042 2045 filematcher = _makefollowlogfilematcher(repo, match.files(),
2043 2046 followfirst)
2044 2047 else:
2045 2048 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2046 2049 if filematcher is None:
2047 2050 filematcher = lambda rev: match
2048 2051
2049 2052 expr = []
2050 2053 for op, val in sorted(opts.iteritems()):
2051 2054 if not val:
2052 2055 continue
2053 2056 if op not in opt2revset:
2054 2057 continue
2055 2058 revop, andor = opt2revset[op]
2056 2059 if '%(val)' not in revop:
2057 2060 expr.append(revop)
2058 2061 else:
2059 2062 if not isinstance(val, list):
2060 2063 e = revop % {'val': val}
2061 2064 else:
2062 2065 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2063 2066 expr.append(e)
2064 2067
2065 2068 if expr:
2066 2069 expr = '(' + ' and '.join(expr) + ')'
2067 2070 else:
2068 2071 expr = None
2069 2072 return expr, filematcher
2070 2073
2071 2074 def _logrevs(repo, opts):
2072 2075 # Default --rev value depends on --follow but --follow behavior
2073 2076 # depends on revisions resolved from --rev...
2074 2077 follow = opts.get('follow') or opts.get('follow_first')
2075 2078 if opts.get('rev'):
2076 2079 revs = scmutil.revrange(repo, opts['rev'])
2077 2080 elif follow and repo.dirstate.p1() == nullid:
2078 2081 revs = revset.baseset()
2079 2082 elif follow:
2080 2083 revs = repo.revs('reverse(:.)')
2081 2084 else:
2082 2085 revs = revset.spanset(repo)
2083 2086 revs.reverse()
2084 2087 return revs
2085 2088
2086 2089 def getgraphlogrevs(repo, pats, opts):
2087 2090 """Return (revs, expr, filematcher) where revs is an iterable of
2088 2091 revision numbers, expr is a revset string built from log options
2089 2092 and file patterns or None, and used to filter 'revs'. If --stat or
2090 2093 --patch are not passed filematcher is None. Otherwise it is a
2091 2094 callable taking a revision number and returning a match object
2092 2095 filtering the files to be detailed when displaying the revision.
2093 2096 """
2094 2097 limit = loglimit(opts)
2095 2098 revs = _logrevs(repo, opts)
2096 2099 if not revs:
2097 2100 return revset.baseset(), None, None
2098 2101 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2099 2102 if opts.get('rev'):
2100 2103 # User-specified revs might be unsorted, but don't sort before
2101 2104 # _makelogrevset because it might depend on the order of revs
2102 2105 revs.sort(reverse=True)
2103 2106 if expr:
2104 2107 # Revset matchers often operate faster on revisions in changelog
2105 2108 # order, because most filters deal with the changelog.
2106 2109 revs.reverse()
2107 2110 matcher = revset.match(repo.ui, expr)
2108 2111 # Revset matches can reorder revisions. "A or B" typically returns
2109 2112 # the revision matching A and then the revision matching B. Sort
2110 2113 # again to fix that.
2111 2114 revs = matcher(repo, revs)
2112 2115 revs.sort(reverse=True)
2113 2116 if limit is not None:
2114 2117 limitedrevs = []
2115 2118 for idx, rev in enumerate(revs):
2116 2119 if idx >= limit:
2117 2120 break
2118 2121 limitedrevs.append(rev)
2119 2122 revs = revset.baseset(limitedrevs)
2120 2123
2121 2124 return revs, expr, filematcher
2122 2125
2123 2126 def getlogrevs(repo, pats, opts):
2124 2127 """Return (revs, expr, filematcher) where revs is an iterable of
2125 2128 revision numbers, expr is a revset string built from log options
2126 2129 and file patterns or None, and used to filter 'revs'. If --stat or
2127 2130 --patch are not passed filematcher is None. Otherwise it is a
2128 2131 callable taking a revision number and returning a match object
2129 2132 filtering the files to be detailed when displaying the revision.
2130 2133 """
2131 2134 limit = loglimit(opts)
2132 2135 revs = _logrevs(repo, opts)
2133 2136 if not revs:
2134 2137 return revset.baseset([]), None, None
2135 2138 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2136 2139 if expr:
2137 2140 # Revset matchers often operate faster on revisions in changelog
2138 2141 # order, because most filters deal with the changelog.
2139 2142 if not opts.get('rev'):
2140 2143 revs.reverse()
2141 2144 matcher = revset.match(repo.ui, expr)
2142 2145 # Revset matches can reorder revisions. "A or B" typically returns
2143 2146 # the revision matching A and then the revision matching B. Sort
2144 2147 # again to fix that.
2145 2148 revs = matcher(repo, revs)
2146 2149 if not opts.get('rev'):
2147 2150 revs.sort(reverse=True)
2148 2151 if limit is not None:
2149 2152 limitedrevs = []
2150 2153 for idx, r in enumerate(revs):
2151 2154 if limit <= idx:
2152 2155 break
2153 2156 limitedrevs.append(r)
2154 2157 revs = revset.baseset(limitedrevs)
2155 2158
2156 2159 return revs, expr, filematcher
2157 2160
2158 2161 def _graphnodeformatter(ui, displayer):
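    """Return a function mapping (repo, ctx) to the graph node symbol,
    honoring the ui.graphnodetemplate setting when it is set."""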
2159 2162 spec = ui.config('ui', 'graphnodetemplate')
2160 2163 if not spec:
2161 2164 return templatekw.showgraphnode # fast path for "{graphnode}"
2162 2165
2163 2166 templ = formatter.gettemplater(ui, 'graphnode', spec)
2164 2167 cache = {}
2165 2168 if isinstance(displayer, changeset_templater):
2166 2169 cache = displayer.cache # reuse cache of slow templates
2167 2170 props = templatekw.keywords.copy()
2168 2171 props['templ'] = templ
2169 2172 props['cache'] = cache
2170 2173 def formatnode(repo, ctx):
2171 2174 props['ctx'] = ctx
2172 2175 props['repo'] = repo
2173 2176 props['revcache'] = {}
2174 2177 return templater.stringify(templ('graphnode', **props))
2175 2178 return formatnode
2176 2179
2177 2180 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2178 2181 filematcher=None):
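    """Render the given DAG as an ASCII graph: show each changeset
    through the displayer (buffered) and draw its edges via graphmod."""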
2179 2182 formatnode = _graphnodeformatter(ui, displayer)
2180 2183 seen, state = [], graphmod.asciistate()
2181 2184 for rev, type, ctx, parents in dag:
2182 2185 char = formatnode(repo, ctx)
2183 2186 copies = None
2184 2187 if getrenamed and ctx.rev():
2185 2188 copies = []
2186 2189 for fn in ctx.files():
2187 2190 rename = getrenamed(fn, ctx.rev())
2188 2191 if rename:
2189 2192 copies.append((fn, rename[0]))
2190 2193 revmatchfn = None
2191 2194 if filematcher is not None:
2192 2195 revmatchfn = filematcher(ctx.rev())
2193 2196 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2194 2197 lines = displayer.hunk.pop(rev).split('\n')
2195 2198 if not lines[-1]:
2196 2199 del lines[-1]
2197 2200 displayer.flush(ctx)
2198 2201 edges = edgefn(type, char, lines, seen, rev, parents)
2199 2202 for type, char, lines, coldata in edges:
2200 2203 graphmod.ascii(ui, state, type, char, lines, coldata)
2201 2204 displayer.close()
2202 2205
2203 2206 def graphlog(ui, repo, *pats, **opts):
2204 2207 # Parameters are identical to log command ones
2205 2208 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2206 2209 revdag = graphmod.dagwalker(repo, revs)
2207 2210
2208 2211 getrenamed = None
2209 2212 if opts.get('copies'):
2210 2213 endrev = None
2211 2214 if opts.get('rev'):
2212 2215 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2213 2216 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2214 2217 displayer = show_changeset(ui, repo, opts, buffered=True)
2215 2218 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2216 2219 filematcher)
2217 2220
2218 2221 def checkunsupportedgraphflags(pats, opts):
2219 2222 for op in ["newest_first"]:
2220 2223 if op in opts and opts[op]:
2221 2224 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2222 2225 % op.replace("_", "-"))
2223 2226
2224 2227 def graphrevs(repo, nodes, opts):
2225 2228 limit = loglimit(opts)
2226 2229 nodes.reverse()
2227 2230 if limit is not None:
2228 2231 nodes = nodes[:limit]
2229 2232 return graphmod.nodes(repo, nodes)
2230 2233
2231 2234 def add(ui, repo, match, prefix, explicitonly, **opts):
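    """Schedule the matched files for addition, recursing into
    subrepositories, and return the list of paths that could not be
    added."""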
2232 2235 join = lambda f: os.path.join(prefix, f)
2233 2236 bad = []
2234 2237
2235 2238 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2236 2239 names = []
2237 2240 wctx = repo[None]
2238 2241 cca = None
2239 2242 abort, warn = scmutil.checkportabilityalert(ui)
2240 2243 if abort or warn:
2241 2244 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2242 2245
2243 2246 badmatch = matchmod.badmatch(match, badfn)
2244 2247 dirstate = repo.dirstate
2245 2248 # We don't want to just call wctx.walk here, since it would return a lot of
2246 2249 # clean files, which we aren't interested in, and doing so takes time.
2247 2250 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2248 2251 True, False, full=False)):
2249 2252 exact = match.exact(f)
2250 2253 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2251 2254 if cca:
2252 2255 cca(f)
2253 2256 names.append(f)
2254 2257 if ui.verbose or not exact:
2255 2258 ui.status(_('adding %s\n') % match.rel(f))
2256 2259
2257 2260 for subpath in sorted(wctx.substate):
2258 2261 sub = wctx.sub(subpath)
2259 2262 try:
2260 2263 submatch = matchmod.narrowmatcher(subpath, match)
2261 2264 if opts.get('subrepos'):
2262 2265 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2263 2266 else:
2264 2267 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2265 2268 except error.LookupError:
2266 2269 ui.status(_("skipping missing subrepository: %s\n")
2267 2270 % join(subpath))
2268 2271
2269 2272 if not opts.get('dry_run'):
2270 2273 rejected = wctx.add(names, prefix)
2271 2274 bad.extend(f for f in rejected if f in match.files())
2272 2275 return bad
2273 2276
2274 2277 def forget(ui, repo, match, prefix, explicitonly):
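    """Stop tracking the matched files, recursing into subrepositories;
    return (bad, forgot) where `bad` lists paths that could not be
    forgotten and `forgot` those that were."""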
2275 2278 join = lambda f: os.path.join(prefix, f)
2276 2279 bad = []
2277 2280 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2278 2281 wctx = repo[None]
2279 2282 forgot = []
2280 2283
2281 2284 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2282 2285 forget = sorted(s[0] + s[1] + s[3] + s[6])
2283 2286 if explicitonly:
2284 2287 forget = [f for f in forget if match.exact(f)]
2285 2288
2286 2289 for subpath in sorted(wctx.substate):
2287 2290 sub = wctx.sub(subpath)
2288 2291 try:
2289 2292 submatch = matchmod.narrowmatcher(subpath, match)
2290 2293 subbad, subforgot = sub.forget(submatch, prefix)
2291 2294 bad.extend([subpath + '/' + f for f in subbad])
2292 2295 forgot.extend([subpath + '/' + f for f in subforgot])
2293 2296 except error.LookupError:
2294 2297 ui.status(_("skipping missing subrepository: %s\n")
2295 2298 % join(subpath))
2296 2299
2297 2300 if not explicitonly:
2298 2301 for f in match.files():
2299 2302 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2300 2303 if f not in forgot:
2301 2304 if repo.wvfs.exists(f):
2302 2305 # Don't complain if the exact case match wasn't given.
2303 2306 # But don't do this until after checking 'forgot', so
2304 2307 # that subrepo files aren't normalized, and this op is
2305 2308 # purely from data cached by the status walk above.
2306 2309 if repo.dirstate.normalize(f) in repo.dirstate:
2307 2310 continue
2308 2311 ui.warn(_('not removing %s: '
2309 2312 'file is already untracked\n')
2310 2313 % match.rel(f))
2311 2314 bad.append(f)
2312 2315
2313 2316 for f in forget:
2314 2317 if ui.verbose or not match.exact(f):
2315 2318 ui.status(_('removing %s\n') % match.rel(f))
2316 2319
2317 2320 rejected = wctx.forget(forget, prefix)
2318 2321 bad.extend(f for f in rejected if f in match.files())
2319 2322 forgot.extend(f for f in forget if f not in rejected)
2320 2323 return bad, forgot
2321 2324
2322 2325 def files(ui, ctx, m, fm, fmt, subrepos):
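    """List the files in ctx matched by m through the formatter fm;
    return 0 if at least one file was listed, 1 otherwise."""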
2323 2326 rev = ctx.rev()
2324 2327 ret = 1
2325 2328 ds = ctx.repo().dirstate
2326 2329
2327 2330 for f in ctx.matches(m):
2328 2331 if rev is None and ds[f] == 'r':
2329 2332 continue
2330 2333 fm.startitem()
2331 2334 if ui.verbose:
2332 2335 fc = ctx[f]
2333 2336 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2334 2337 fm.data(abspath=f)
2335 2338 fm.write('path', fmt, m.rel(f))
2336 2339 ret = 0
2337 2340
2338 2341 for subpath in sorted(ctx.substate):
2339 2342 def matchessubrepo(subpath):
2340 2343 return (m.always() or m.exact(subpath)
2341 2344 or any(f.startswith(subpath + '/') for f in m.files()))
2342 2345
2343 2346 if subrepos or matchessubrepo(subpath):
2344 2347 sub = ctx.sub(subpath)
2345 2348 try:
2346 2349 submatch = matchmod.narrowmatcher(subpath, m)
2347 2350 if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
2348 2351 ret = 0
2349 2352 except error.LookupError:
2350 2353 ui.status(_("skipping missing subrepository: %s\n")
2351 2354 % m.abs(subpath))
2352 2355
2353 2356 return ret
2354 2357
2355 2358 def remove(ui, repo, m, prefix, after, force, subrepos):
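    """Schedule the matched files for removal, honoring the after/force
    semantics and recursing into subrepositories; return 1 if any
    problems were encountered, else 0."""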
2356 2359 join = lambda f: os.path.join(prefix, f)
2357 2360 ret = 0
2358 2361 s = repo.status(match=m, clean=True)
2359 2362 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2360 2363
2361 2364 wctx = repo[None]
2362 2365
2363 2366 for subpath in sorted(wctx.substate):
2364 2367 def matchessubrepo(matcher, subpath):
2365 2368 if matcher.exact(subpath):
2366 2369 return True
2367 2370 for f in matcher.files():
2368 2371 if f.startswith(subpath):
2369 2372 return True
2370 2373 return False
2371 2374
2372 2375 if subrepos or matchessubrepo(m, subpath):
2373 2376 sub = wctx.sub(subpath)
2374 2377 try:
2375 2378 submatch = matchmod.narrowmatcher(subpath, m)
2376 2379 if sub.removefiles(submatch, prefix, after, force, subrepos):
2377 2380 ret = 1
2378 2381 except error.LookupError:
2379 2382 ui.status(_("skipping missing subrepository: %s\n")
2380 2383 % join(subpath))
2381 2384
2382 2385 # warn about failure to delete explicit files/dirs
2383 2386 deleteddirs = util.dirs(deleted)
2384 2387 for f in m.files():
2385 2388 def insubrepo():
2386 2389 for subpath in wctx.substate:
2387 2390 if f.startswith(subpath):
2388 2391 return True
2389 2392 return False
2390 2393
2391 2394 isdir = f in deleteddirs or wctx.hasdir(f)
2392 2395 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2393 2396 continue
2394 2397
2395 2398 if repo.wvfs.exists(f):
2396 2399 if repo.wvfs.isdir(f):
2397 2400 ui.warn(_('not removing %s: no tracked files\n')
2398 2401 % m.rel(f))
2399 2402 else:
2400 2403 ui.warn(_('not removing %s: file is untracked\n')
2401 2404 % m.rel(f))
2402 2405 # missing files will generate a warning elsewhere
2403 2406 ret = 1
2404 2407
2405 2408 if force:
2406 2409 list = modified + deleted + clean + added
2407 2410 elif after:
2408 2411 list = deleted
2409 2412 for f in modified + added + clean:
2410 2413 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2411 2414 ret = 1
2412 2415 else:
2413 2416 list = deleted + clean
2414 2417 for f in modified:
2415 2418 ui.warn(_('not removing %s: file is modified (use -f'
2416 2419 ' to force removal)\n') % m.rel(f))
2417 2420 ret = 1
2418 2421 for f in added:
2419 2422 ui.warn(_('not removing %s: file has been marked for add'
2420 2423 ' (use forget to undo)\n') % m.rel(f))
2421 2424 ret = 1
2422 2425
2423 2426 for f in sorted(list):
2424 2427 if ui.verbose or not m.exact(f):
2425 2428 ui.status(_('removing %s\n') % m.rel(f))
2426 2429
2427 2430 wlock = repo.wlock()
2428 2431 try:
2429 2432 if not after:
2430 2433 for f in list:
2431 2434 if f in added:
2432 2435 continue # we never unlink added files on remove
2433 2436 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2434 2437 repo[None].forget(list)
2435 2438 finally:
2436 2439 wlock.release()
2437 2440
2438 2441 return ret
2439 2442
2440 2443 def cat(ui, repo, ctx, matcher, prefix, **opts):
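    """Write the data of the matched files in ctx to the output
    locations derived from opts, recursing into subrepositories;
    return 0 if at least one file was written, 1 otherwise."""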
2441 2444 err = 1
2442 2445
2443 2446 def write(path):
2444 2447 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2445 2448 pathname=os.path.join(prefix, path))
2446 2449 data = ctx[path].data()
2447 2450 if opts.get('decode'):
2448 2451 data = repo.wwritedata(path, data)
2449 2452 fp.write(data)
2450 2453 fp.close()
2451 2454
2452 2455 # Automation often uses hg cat on single files, so special case it
2453 2456 # for performance to avoid the cost of parsing the manifest.
2454 2457 if len(matcher.files()) == 1 and not matcher.anypats():
2455 2458 file = matcher.files()[0]
2456 2459 mf = repo.manifest
2457 2460 mfnode = ctx.manifestnode()
2458 2461 if mfnode and mf.find(mfnode, file)[0]:
2459 2462 write(file)
2460 2463 return 0
2461 2464
2462 2465 # Don't warn about "missing" files that are really in subrepos
2463 2466 def badfn(path, msg):
2464 2467 for subpath in ctx.substate:
2465 2468 if path.startswith(subpath):
2466 2469 return
2467 2470 matcher.bad(path, msg)
2468 2471
2469 2472 for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
2470 2473 write(abs)
2471 2474 err = 0
2472 2475
2473 2476 for subpath in sorted(ctx.substate):
2474 2477 sub = ctx.sub(subpath)
2475 2478 try:
2476 2479 submatch = matchmod.narrowmatcher(subpath, matcher)
2477 2480
2478 2481 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2479 2482 **opts):
2480 2483 err = 0
2481 2484 except error.RepoLookupError:
2482 2485 ui.status(_("skipping missing subrepository: %s\n")
2483 2486 % os.path.join(prefix, subpath))
2484 2487
2485 2488 return err
2486 2489
2487 2490 def commit(ui, repo, commitfunc, pats, opts):
2488 2491 '''commit the specified files or all outstanding changes'''
2489 2492 date = opts.get('date')
2490 2493 if date:
2491 2494 opts['date'] = util.parsedate(date)
2492 2495 message = logmessage(ui, opts)
2493 2496 matcher = scmutil.match(repo[None], pats, opts)
2494 2497
2495 2498 # extract addremove carefully -- this function can be called from a command
2496 2499 # that doesn't support addremove
2497 2500 if opts.get('addremove'):
2498 2501 if scmutil.addremove(repo, matcher, "", opts) != 0:
2499 2502 raise error.Abort(
2500 2503 _("failed to mark all new/missing files as added/removed"))
2501 2504
2502 2505 return commitfunc(ui, repo, message, matcher, opts)
2503 2506
2504 2507 def amend(ui, repo, commitfunc, old, extra, pats, opts):
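    """Combine changeset `old` with the current working directory
    changes into a replacement changeset, recording obsolescence
    markers or stripping the replaced changesets, and return the new
    node id."""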
2505 2508 # avoid cycle context -> subrepo -> cmdutil
2506 2509 import context
2507 2510
2508 2511 # amend will reuse the existing user if not specified, but the obsolete
2509 2512 # marker creation requires that the current user's name is specified.
2510 2513 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2511 2514 ui.username() # raise exception if username not set
2512 2515
2513 2516 ui.note(_('amending changeset %s\n') % old)
2514 2517 base = old.p1()
2515 2518 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2516 2519
2517 2520 wlock = lock = newid = None
2518 2521 try:
2519 2522 wlock = repo.wlock()
2520 2523 lock = repo.lock()
2521 2524 tr = repo.transaction('amend')
2522 2525 try:
2523 2526 # See if we got a message from -m or -l, if not, open the editor
2524 2527 # with the message of the changeset to amend
2525 2528 message = logmessage(ui, opts)
2526 2529 # ensure logfile does not conflict with later enforcement of the
2527 2530 # message. potential logfile content has been processed by
2528 2531 # `logmessage` anyway.
2529 2532 opts.pop('logfile')
2530 2533 # First, do a regular commit to record all changes in the working
2531 2534 # directory (if there are any)
2532 2535 ui.callhooks = False
2533 2536 activebookmark = repo._activebookmark
2534 2537 try:
2535 2538 repo._activebookmark = None
2536 2539 opts['message'] = 'temporary amend commit for %s' % old
2537 2540 node = commit(ui, repo, commitfunc, pats, opts)
2538 2541 finally:
2539 2542 repo._activebookmark = activebookmark
2540 2543 ui.callhooks = True
2541 2544 ctx = repo[node]
2542 2545
2543 2546 # Participating changesets:
2544 2547 #
2545 2548 # node/ctx o - new (intermediate) commit that contains changes
2546 2549 # | from working dir to go into amending commit
2547 2550 # | (or a workingctx if there were no changes)
2548 2551 # |
2549 2552 # old o - changeset to amend
2550 2553 # |
2551 2554 # base o - parent of amending changeset
2552 2555
2553 2556 # Update extra dict from amended commit (e.g. to preserve graft
2554 2557 # source)
2555 2558 extra.update(old.extra())
2556 2559
2557 2560 # Also update it from the intermediate commit or from the wctx
2558 2561 extra.update(ctx.extra())
2559 2562
2560 2563 if len(old.parents()) > 1:
2561 2564 # ctx.files() isn't reliable for merges, so fall back to the
2562 2565 # slower repo.status() method
2563 2566 files = set([fn for st in repo.status(base, old)[:3]
2564 2567 for fn in st])
2565 2568 else:
2566 2569 files = set(old.files())
2567 2570
2568 2571 # Second, we use either the commit we just did, or if there were no
2569 2572 # changes the parent of the working directory as the version of the
2570 2573 # files in the final amend commit
2571 2574 if node:
2572 2575 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2573 2576
2574 2577 user = ctx.user()
2575 2578 date = ctx.date()
2576 2579 # Recompute copies (avoid recording a -> b -> a)
2577 2580 copied = copies.pathcopies(base, ctx)
2578 2581 if old.p2().rev() != nullrev: # only merges have a real second parent
2579 2582 copied.update(copies.pathcopies(old.p2(), ctx))
2580 2583
2581 2584 # Prune files which were reverted by the updates: if old
2582 2585 # introduced file X and our intermediate commit, node,
2583 2586 # renamed that file, then those two files are the same and
2584 2587 # we can discard X from our list of files. Likewise if X
2585 2588 # was deleted, it's no longer relevant
2586 2589 files.update(ctx.files())
2587 2590
2588 2591 def samefile(f):
2589 2592 if f in ctx.manifest():
2590 2593 a = ctx.filectx(f)
2591 2594 if f in base.manifest():
2592 2595 b = base.filectx(f)
2593 2596 return (not a.cmp(b)
2594 2597 and a.flags() == b.flags())
2595 2598 else:
2596 2599 return False
2597 2600 else:
2598 2601 return f not in base.manifest()
2599 2602 files = [f for f in files if not samefile(f)]
2600 2603
2601 2604 def filectxfn(repo, ctx_, path):
2602 2605 try:
2603 2606 fctx = ctx[path]
2604 2607 flags = fctx.flags()
2605 2608 mctx = context.memfilectx(repo,
2606 2609 fctx.path(), fctx.data(),
2607 2610 islink='l' in flags,
2608 2611 isexec='x' in flags,
2609 2612 copied=copied.get(path))
2610 2613 return mctx
2611 2614 except KeyError:
2612 2615 return None
2613 2616 else:
2614 2617 ui.note(_('copying changeset %s to %s\n') % (old, base))
2615 2618
2616 2619 # Use version of files as in the old cset
2617 2620 def filectxfn(repo, ctx_, path):
2618 2621 try:
2619 2622 return old.filectx(path)
2620 2623 except KeyError:
2621 2624 return None
2622 2625
2623 2626 user = opts.get('user') or old.user()
2624 2627 date = opts.get('date') or old.date()
2625 2628 editform = mergeeditform(old, 'commit.amend')
2626 2629 editor = getcommiteditor(editform=editform, **opts)
2627 2630 if not message:
2628 2631 editor = getcommiteditor(edit=True, editform=editform)
2629 2632 message = old.description()
2630 2633
2631 2634 pureextra = extra.copy()
2632 2635 if 'amend_source' in pureextra:
2633 2636 del pureextra['amend_source']
2634 2637 pureoldextra = old.extra()
2635 2638 if 'amend_source' in pureoldextra:
2636 2639 del pureoldextra['amend_source']
2637 2640 extra['amend_source'] = old.hex()
2638 2641
2639 2642 new = context.memctx(repo,
2640 2643 parents=[base.node(), old.p2().node()],
2641 2644 text=message,
2642 2645 files=files,
2643 2646 filectxfn=filectxfn,
2644 2647 user=user,
2645 2648 date=date,
2646 2649 extra=extra,
2647 2650 editor=editor)
2648 2651
2649 2652 newdesc = changelog.stripdesc(new.description())
2650 2653 if ((not node)
2651 2654 and newdesc == old.description()
2652 2655 and user == old.user()
2653 2656 and date == old.date()
2654 2657 and pureextra == pureoldextra):
2655 2658 # nothing changed. continuing here would create a new node
2656 2659 # anyway because of the amend_source noise.
2657 2660 #
2658 2661 # This is not what we expect from amend.
2659 2662 return old.node()
2660 2663
2661 2664 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2662 2665 try:
2663 2666 if opts.get('secret'):
2664 2667 commitphase = 'secret'
2665 2668 else:
2666 2669 commitphase = old.phase()
2667 2670 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2668 2671 newid = repo.commitctx(new)
2669 2672 finally:
2670 2673 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2671 2674 if newid != old.node():
2672 2675 # Reroute the working copy parent to the new changeset
2673 2676 repo.setparents(newid, nullid)
2674 2677
2675 2678 # Move bookmarks from old parent to amend commit
2676 2679 bms = repo.nodebookmarks(old.node())
2677 2680 if bms:
2678 2681 marks = repo._bookmarks
2679 2682 for bm in bms:
2680 2683 ui.debug('moving bookmarks %r from %s to %s\n' %
2681 2684 (marks, old.hex(), hex(newid)))
2682 2685 marks[bm] = newid
2683 2686 marks.recordchange(tr)
2684 2687 # commit the whole amend process
2685 2688 if createmarkers:
2686 2689 # mark the new changeset as successor of the rewritten one
2687 2690 new = repo[newid]
2688 2691 obs = [(old, (new,))]
2689 2692 if node:
2690 2693 obs.append((ctx, ()))
2691 2694
2692 2695 obsolete.createmarkers(repo, obs)
2693 2696 tr.close()
2694 2697 finally:
2695 2698 tr.release()
2696 2699 if not createmarkers and newid != old.node():
2697 2700 # Strip the intermediate commit (if there was one) and the amended
2698 2701 # commit
2699 2702 if node:
2700 2703 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2701 2704 ui.note(_('stripping amended changeset %s\n') % old)
2702 2705 repair.strip(ui, repo, old.node(), topic='amend-backup')
2703 2706 finally:
2704 2707 lockmod.release(lock, wlock)
2705 2708 return newid
2706 2709
2707 2710 def commiteditor(repo, ctx, subs, editform=''):
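    """Return ctx's description when it already has one, otherwise run
    the commit message editor."""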
2708 2711 if ctx.description():
2709 2712 return ctx.description()
2710 2713 return commitforceeditor(repo, ctx, subs, editform=editform,
2711 2714 unchangedmessagedetection=True)
2712 2715
2713 2716 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2714 2717 editform='', unchangedmessagedetection=False):
2715 2718 if not extramsg:
2716 2719 extramsg = _("Leave message empty to abort commit.")
2717 2720
2718 2721 forms = [e for e in editform.split('.') if e]
2719 2722 forms.insert(0, 'changeset')
2720 2723 templatetext = None
2721 2724 while forms:
2722 2725 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2723 2726 if tmpl:
2724 2727 templatetext = committext = buildcommittemplate(
2725 2728 repo, ctx, subs, extramsg, tmpl)
2726 2729 break
2727 2730 forms.pop()
2728 2731 else:
2729 2732 committext = buildcommittext(repo, ctx, subs, extramsg)
2730 2733
2731 2734 # run editor in the repository root
2732 2735 olddir = os.getcwd()
2733 2736 os.chdir(repo.root)
2734 2737
2735 2738 # make in-memory changes visible to external process
2736 2739 tr = repo.currenttransaction()
2737 2740 repo.dirstate.write(tr)
2738 2741 pending = tr and tr.writepending() and repo.root
2739 2742
2740 2743 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2741 2744 editform=editform, pending=pending)
2742 2745 text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
2743 2746 os.chdir(olddir)
2744 2747
2745 2748 if finishdesc:
2746 2749 text = finishdesc(text)
2747 2750 if not text.strip():
2748 2751 raise error.Abort(_("empty commit message"))
2749 2752 if unchangedmessagedetection and editortext == templatetext:
2750 2753 raise error.Abort(_("commit message unchanged"))
2751 2754
2752 2755 return text
2753 2756
2754 2757 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2755 2758 ui = repo.ui
2756 2759 tmpl, mapfile = gettemplate(ui, tmpl, None)
2757 2760
2758 2761 try:
2759 2762 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2760 2763 except SyntaxError as inst:
2761 2764 raise error.Abort(inst.args[0])
2762 2765
2763 2766 for k, v in repo.ui.configitems('committemplate'):
2764 2767 if k != 'changeset':
2765 2768 t.t.cache[k] = v
2766 2769
2767 2770 if not extramsg:
2768 2771 extramsg = '' # ensure that extramsg is string
2769 2772
2770 2773 ui.pushbuffer()
2771 2774 t.show(ctx, extramsg=extramsg)
2772 2775 return ui.popbuffer()
2773 2776
2774 2777 def hgprefix(msg):
2775 2778 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2776 2779
2777 2780 def buildcommittext(repo, ctx, subs, extramsg):
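    """Build the default commit editor text: the current description
    followed by HG:-prefixed lines describing the user, branch and
    changed files."""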
2778 2781 edittext = []
2779 2782 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2780 2783 if ctx.description():
2781 2784 edittext.append(ctx.description())
2782 2785 edittext.append("")
2783 2786 edittext.append("") # Empty line between message and comments.
2784 2787 edittext.append(hgprefix(_("Enter commit message."
2785 2788 " Lines beginning with 'HG:' are removed.")))
2786 2789 edittext.append(hgprefix(extramsg))
2787 2790 edittext.append("HG: --")
2788 2791 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2789 2792 if ctx.p2():
2790 2793 edittext.append(hgprefix(_("branch merge")))
2791 2794 if ctx.branch():
2792 2795 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2793 2796 if bookmarks.isactivewdirparent(repo):
2794 2797 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2795 2798 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2796 2799 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2797 2800 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2798 2801 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2799 2802 if not added and not modified and not removed:
2800 2803 edittext.append(hgprefix(_("no files changed")))
2801 2804 edittext.append("")
2802 2805
2803 2806 return "\n".join(edittext)
2804 2807
2805 2808 def commitstatus(repo, node, branch, bheads=None, opts=None):
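    """Print post-commit status messages: 'created new head' when
    appropriate, a note about reopened closed branch heads, and the
    committed changeset in verbose/debug mode."""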
2806 2809 if opts is None:
2807 2810 opts = {}
2808 2811 ctx = repo[node]
2809 2812 parents = ctx.parents()
2810 2813
2811 2814 if (not opts.get('amend') and bheads and node not in bheads and not
2812 2815 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2813 2816 repo.ui.status(_('created new head\n'))
2814 2817 # The message is not printed for initial roots. For the other
2815 2818 # changesets, it is printed in the following situations:
2816 2819 #
2817 2820 # Par column: for the 2 parents with ...
2818 2821 # N: null or no parent
2819 2822 # B: parent is on another named branch
2820 2823 # C: parent is a regular non head changeset
2821 2824 # H: parent was a branch head of the current branch
2822 2825 # Msg column: whether we print "created new head" message
2823 2826 # In the following, it is assumed that there already exists some
2824 2827 # initial branch heads of the current branch, otherwise nothing is
2825 2828 # printed anyway.
2826 2829 #
2827 2830 # Par Msg Comment
2828 2831 # N N y additional topo root
2829 2832 #
2830 2833 # B N y additional branch root
2831 2834 # C N y additional topo head
2832 2835 # H N n usual case
2833 2836 #
2834 2837 # B B y weird additional branch root
2835 2838 # C B y branch merge
2836 2839 # H B n merge with named branch
2837 2840 #
2838 2841 # C C y additional head from merge
2839 2842 # C H n merge with a head
2840 2843 #
2841 2844 # H H n head merge: head count decreases
2842 2845
2843 2846 if not opts.get('close_branch'):
2844 2847 for r in parents:
2845 2848 if r.closesbranch() and r.branch() == branch:
2846 2849 repo.ui.status(_('reopening closed branch head %d\n') % r)
2847 2850
2848 2851 if repo.ui.debugflag:
2849 2852 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2850 2853 elif repo.ui.verbose:
2851 2854 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2852 2855
2853 2856 def revert(ui, repo, ctx, parents, *pats, **opts):
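    """Revert the matched files to their state in ctx, backing up
    locally modified files unless backups are disabled, and handling
    interactive mode and subrepositories."""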
2854 2857 parent, p2 = parents
2855 2858 node = ctx.node()
2856 2859
2857 2860 mf = ctx.manifest()
2858 2861 if node == p2:
2859 2862 parent = p2
2860 2863 if node == parent:
2861 2864 pmf = mf
2862 2865 else:
2863 2866 pmf = None
2864 2867
2865 2868 # need all matching names in dirstate and manifest of target rev,
2866 2869 # so have to walk both. do not print errors if files exist in one
2867 2870 # but not other. in both cases, filesets should be evaluated against
2868 2871 # workingctx to get consistent result (issue4497). this means 'set:**'
2869 2872 # cannot be used to select missing files from target rev.
2870 2873
2871 2874 # `names` is a mapping for all elements in working copy and target revision
2872 2875 # The mapping is in the form:
2873 2876 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2874 2877 names = {}
2875 2878
2876 2879 wlock = repo.wlock()
2877 2880 try:
2878 2881 ## filling of the `names` mapping
2879 2882 # walk dirstate to fill `names`
2880 2883
2881 2884 interactive = opts.get('interactive', False)
2882 2885 wctx = repo[None]
2883 2886 m = scmutil.match(wctx, pats, opts)
2884 2887
2885 2888 # we'll need this later
2886 2889 targetsubs = sorted(s for s in wctx.substate if m(s))
2887 2890
2888 2891 if not m.always():
2889 2892 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2890 2893 names[abs] = m.rel(abs), m.exact(abs)
2891 2894
2892 2895 # walk target manifest to fill `names`
2893 2896
2894 2897 def badfn(path, msg):
2895 2898 if path in names:
2896 2899 return
2897 2900 if path in ctx.substate:
2898 2901 return
2899 2902 path_ = path + '/'
2900 2903 for f in names:
2901 2904 if f.startswith(path_):
2902 2905 return
2903 2906 ui.warn("%s: %s\n" % (m.rel(path), msg))
2904 2907
2905 2908 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2906 2909 if abs not in names:
2907 2910 names[abs] = m.rel(abs), m.exact(abs)
2908 2911
2909 2912 # Find the status of all files in `names`.
2910 2913 m = scmutil.matchfiles(repo, names)
2911 2914
2912 2915 changes = repo.status(node1=node, match=m,
2913 2916 unknown=True, ignored=True, clean=True)
2914 2917 else:
2915 2918 changes = repo.status(node1=node, match=m)
2916 2919 for kind in changes:
2917 2920 for abs in kind:
2918 2921 names[abs] = m.rel(abs), m.exact(abs)
2919 2922
2920 2923 m = scmutil.matchfiles(repo, names)
2921 2924
2922 2925 modified = set(changes.modified)
2923 2926 added = set(changes.added)
2924 2927 removed = set(changes.removed)
2925 2928 _deleted = set(changes.deleted)
2926 2929 unknown = set(changes.unknown)
2927 2930 unknown.update(changes.ignored)
2928 2931 clean = set(changes.clean)
2929 2932 modadded = set()
2930 2933
2931 2934 # split between files known in target manifest and the others
2932 2935 smf = set(mf)
2933 2936
2934 2937 # determine the exact nature of the deleted files
2935 2938 deladded = _deleted - smf
2936 2939 deleted = _deleted - deladded
2937 2940
2938 2941 # We need to account for the state of the file in the dirstate,
2939 2942 # even when we revert against something other than the parent. This will
2940 2943 # slightly alter the behavior of revert (doing a backup or not, delete
2941 2944 # or just forget, etc).
2942 2945 if parent == node:
2943 2946 dsmodified = modified
2944 2947 dsadded = added
2945 2948 dsremoved = removed
2946 2949 # store all local modifications, useful later for rename detection
2947 2950 localchanges = dsmodified | dsadded
2948 2951 modified, added, removed = set(), set(), set()
2949 2952 else:
2950 2953 changes = repo.status(node1=parent, match=m)
2951 2954 dsmodified = set(changes.modified)
2952 2955 dsadded = set(changes.added)
2953 2956 dsremoved = set(changes.removed)
2954 2957 # store all local modifications, useful later for rename detection
2955 2958 localchanges = dsmodified | dsadded
2956 2959
2957 2960 # only take removes between wc and target into account
2958 2961 clean |= dsremoved - removed
2959 2962 dsremoved &= removed
2960 2963 # distinguish between dirstate removes and the others
2961 2964 removed -= dsremoved
2962 2965
2963 2966 modadded = added & dsmodified
2964 2967 added -= modadded
2965 2968
2966 2969 # tell newly modified files apart.
2967 2970 dsmodified &= modified
2968 2971 dsmodified |= modified & dsadded # dirstate added may need backup
2969 2972 modified -= dsmodified
2970 2973
2971 2974 # We need to wait for some post-processing to update this set
2972 2975 # before making the distinction. The dirstate will be used for
2973 2976 # that purpose.
2974 2977 dsadded = added
2975 2978
2976 2979 # in case of merge, files that are actually added can be reported as
2977 2980 # modified, we need to post process the result
2978 2981 if p2 != nullid:
2979 2982 if pmf is None:
2980 2983 # only need parent manifest in the merge case,
2981 2984 # so do not read by default
2982 2985 pmf = repo[parent].manifest()
2983 2986 mergeadd = dsmodified - set(pmf)
2984 2987 dsadded |= mergeadd
2985 2988 dsmodified -= mergeadd
2986 2989
2987 2990 # if f is a rename, update `names` to also revert the source
2988 2991 cwd = repo.getcwd()
2989 2992 for f in localchanges:
2990 2993 src = repo.dirstate.copied(f)
2991 2994 # XXX should we check for rename down to target node?
2992 2995 if src and src not in names and repo.dirstate[src] == 'r':
2993 2996 dsremoved.add(src)
2994 2997 names[src] = (repo.pathto(src, cwd), True)
2995 2998
2996 2999 # distinguish between file to forget and the other
2997 3000 added = set()
2998 3001 for abs in dsadded:
2999 3002 if repo.dirstate[abs] != 'a':
3000 3003 added.add(abs)
3001 3004 dsadded -= added
3002 3005
3003 3006 for abs in deladded:
3004 3007 if repo.dirstate[abs] == 'a':
3005 3008 dsadded.add(abs)
3006 3009 deladded -= dsadded
3007 3010
3008 3011 # For files marked as removed, we check if an unknown file is present at
3009 3012 # the same path. If such a file exists it may need to be backed up.
3010 3013 # Making the distinction at this stage keeps the backup logic
3011 3014 # simpler.
3012 3015 removunk = set()
3013 3016 for abs in removed:
3014 3017 target = repo.wjoin(abs)
3015 3018 if os.path.lexists(target):
3016 3019 removunk.add(abs)
3017 3020 removed -= removunk
3018 3021
3019 3022 dsremovunk = set()
3020 3023 for abs in dsremoved:
3021 3024 target = repo.wjoin(abs)
3022 3025 if os.path.lexists(target):
3023 3026 dsremovunk.add(abs)
3024 3027 dsremoved -= dsremovunk
3025 3028
3026 3029 # action to be actually performed by revert
3027 3030 # (<list of files>, <message>) tuple
3028 3031 actions = {'revert': ([], _('reverting %s\n')),
3029 3032 'add': ([], _('adding %s\n')),
3030 3033 'remove': ([], _('removing %s\n')),
3031 3034 'drop': ([], _('removing %s\n')),
3032 3035 'forget': ([], _('forgetting %s\n')),
3033 3036 'undelete': ([], _('undeleting %s\n')),
3034 3037 'noop': (None, _('no changes needed to %s\n')),
3035 3038 'unknown': (None, _('file not managed: %s\n')),
3036 3039 }
3037 3040
3038 3041 # "constant" that convey the backup strategy.
3039 3042 # All set to `discard` if `no-backup` is set do avoid checking
3040 3043 # no_backup lower in the code.
3041 3044 # These values are ordered for comparison purposes
3042 3045 backup = 2 # unconditionally do backup
3043 3046 check = 1 # check if the existing file differs from target
3044 3047 discard = 0 # never do backup
3045 3048 if opts.get('no_backup'):
3046 3049 backup = check = discard
3047 3050
3048 3051 backupanddel = actions['remove']
3049 3052 if not opts.get('no_backup'):
3050 3053 backupanddel = actions['drop']
3051 3054
3052 3055 disptable = (
3053 3056 # dispatch table:
3054 3057 # file state
3055 3058 # action
3056 3059 # make backup
3057 3060
3058 3061 ## Sets that will result in file changes on disk
3059 3062 # Modified compared to target, no local change
3060 3063 (modified, actions['revert'], discard),
3061 3064 # Modified compared to target, but local file is deleted
3062 3065 (deleted, actions['revert'], discard),
3063 3066 # Modified compared to target, local change
3064 3067 (dsmodified, actions['revert'], backup),
3065 3068 # Added since target
3066 3069 (added, actions['remove'], discard),
3067 3070 # Added in working directory
3068 3071 (dsadded, actions['forget'], discard),
3069 3072 # Added since target, have local modification
3070 3073 (modadded, backupanddel, backup),
3071 3074 # Added since target but file is missing in working directory
3072 3075 (deladded, actions['drop'], discard),
3073 3076 # Removed since target, before working copy parent
3074 3077 (removed, actions['add'], discard),
3075 3078 # Same as `removed` but an unknown file exists at the same path
3076 3079 (removunk, actions['add'], check),
3077 3080 # Removed since target, marked as such in working copy parent
3078 3081 (dsremoved, actions['undelete'], discard),
3079 3082 # Same as `dsremoved` but an unknown file exists at the same path
3080 3083 (dsremovunk, actions['undelete'], check),
3081 3084 ## the following sets do not result in any file changes
3082 3085 # File with no modification
3083 3086 (clean, actions['noop'], discard),
3084 3087 # Existing file, not tracked anywhere
3085 3088 (unknown, actions['unknown'], discard),
3086 3089 )
3087 3090
3088 3091 for abs, (rel, exact) in sorted(names.items()):
3089 3092 # target file to be touched on disk (relative to cwd)
3090 3093 target = repo.wjoin(abs)
3091 3094 # search for the entry in the dispatch table.
3092 3095 # if the file is in any of these sets, it was touched in the working
3093 3096 # directory parent and we are sure it needs to be reverted.
3094 3097 for table, (xlist, msg), dobackup in disptable:
3095 3098 if abs not in table:
3096 3099 continue
3097 3100 if xlist is not None:
3098 3101 xlist.append(abs)
3099 3102 if dobackup and (backup <= dobackup
3100 3103 or wctx[abs].cmp(ctx[abs])):
3101 3104 bakname = origpath(ui, repo, rel)
3102 3105 ui.note(_('saving current version of %s as %s\n') %
3103 3106 (rel, bakname))
3104 3107 if not opts.get('dry_run'):
3105 3108 if interactive:
3106 3109 util.copyfile(target, bakname)
3107 3110 else:
3108 3111 util.rename(target, bakname)
3109 3112 if ui.verbose or not exact:
3110 3113 if not isinstance(msg, basestring):
3111 3114 msg = msg(abs)
3112 3115 ui.status(msg % rel)
3113 3116 elif exact:
3114 3117 ui.warn(msg % rel)
3115 3118 break
3116 3119
3117 3120 if not opts.get('dry_run'):
3118 3121 needdata = ('revert', 'add', 'undelete')
3119 3122 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3120 3123 _performrevert(repo, parents, ctx, actions, interactive)
3121 3124
3122 3125 if targetsubs:
3123 3126 # Revert the subrepos on the revert list
3124 3127 for sub in targetsubs:
3125 3128 try:
3126 3129 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3127 3130 except KeyError:
3128 3131 raise error.Abort("subrepository '%s' does not exist in %s!"
3129 3132 % (sub, short(ctx.node())))
3130 3133 finally:
3131 3134 wlock.release()
3132 3135
3133 3136 def origpath(ui, repo, filepath):
3134 3137 '''customize where .orig files are created
3135 3138
3136 3139 Fetch the user-defined path from the config file: [ui] origbackuppath = <path>
3137 3140 Fall back to the default (filepath + ".orig") if it is not specified
3138 3141 '''
3139 3142 origbackuppath = ui.config('ui', 'origbackuppath', None)
3140 3143 if origbackuppath is None:
3141 3144 return filepath + ".orig"
3142 3145
3143 3146 filepathfromroot = os.path.relpath(filepath, start=repo.root)
3144 3147 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
3145 3148
3146 3149 origbackupdir = repo.vfs.dirname(fullorigpath)
3147 3150 if not repo.vfs.exists(origbackupdir):
3148 3151 ui.note(_('creating directory: %s\n') % origbackupdir)
3149 3152 util.makedirs(origbackupdir)
3150 3153
3151 3154 return fullorigpath + ".orig"
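An editrial illustration of the two code paths in origpath() above; the file name and the origbackuppath value are invented, and the resulting path assumes the command is run from the repository root.

    # With no [ui] origbackuppath configured, the backup stays next to the file:
    #     origpath(ui, repo, 'foo/bar.txt')  ->  'foo/bar.txt.orig'
    #
    # With a hypothetical setting in hgrc:
    #     [ui]
    #     origbackuppath = .hg/origbackups
    # the same call instead yields (roughly):
    #     <repo root>/.hg/origbackups/foo/bar.txt.orig
    # creating intermediate directories as needed.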
3152 3155
3153 3156 def _revertprefetch(repo, ctx, *files):
3154 3157 """Let extension changing the storage layer prefetch content"""
3155 3158 pass
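A minimal sketch of how a storage-layer extension might hook the no-op above, assuming the usual extensions.wrapfunction pattern; bulkfetch() is a hypothetical helper, not a real Mercurial API.

    from mercurial import cmdutil, extensions

    def bulkfetch(repo, ctx, files):
        # hypothetical helper: fetch the contents of `files` at `ctx` in one round trip
        pass

    def _wrappedrevertprefetch(orig, repo, ctx, *files):
        bulkfetch(repo, ctx, files)
        return orig(repo, ctx, *files)

    def uisetup(ui):
        extensions.wrapfunction(cmdutil, '_revertprefetch', _wrappedrevertprefetch)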
3156 3159
3157 3160 def _performrevert(repo, parents, ctx, actions, interactive=False):
3158 3161 """function that actually perform all the actions computed for revert
3159 3162
3160 3163 This is an independent function to let extension to plug in and react to
3161 3164 the imminent revert.
3162 3165
3163 3166 Make sure you have the working directory locked when calling this function.
3164 3167 """
3165 3168 parent, p2 = parents
3166 3169 node = ctx.node()
3167 3170 def checkout(f):
3168 3171 fc = ctx[f]
3169 3172 repo.wwrite(f, fc.data(), fc.flags())
3170 3173
3171 3174 audit_path = pathutil.pathauditor(repo.root)
3172 3175 for f in actions['forget'][0]:
3173 3176 repo.dirstate.drop(f)
3174 3177 for f in actions['remove'][0]:
3175 3178 audit_path(f)
3176 3179 try:
3177 3180 util.unlinkpath(repo.wjoin(f))
3178 3181 except OSError:
3179 3182 pass
3180 3183 repo.dirstate.remove(f)
3181 3184 for f in actions['drop'][0]:
3182 3185 audit_path(f)
3183 3186 repo.dirstate.remove(f)
3184 3187
3185 3188 normal = None
3186 3189 if node == parent:
3187 3190 # We're reverting to our parent. If possible, we'd like status
3188 3191 # to report the file as clean. We have to use normallookup for
3189 3192 # merges to avoid losing information about merged/dirty files.
3190 3193 if p2 != nullid:
3191 3194 normal = repo.dirstate.normallookup
3192 3195 else:
3193 3196 normal = repo.dirstate.normal
3194 3197
3195 3198 newlyaddedandmodifiedfiles = set()
3196 3199 if interactive:
3197 3200 # Prompt the user for changes to revert
3198 3201 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3199 3202 m = scmutil.match(ctx, torevert, {})
3200 3203 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3201 3204 diffopts.nodates = True
3202 3205 diffopts.git = True
3203 3206 reversehunks = repo.ui.configbool('experimental',
3204 3207 'revertalternateinteractivemode',
3205 3208 True)
3206 3209 if reversehunks:
3207 3210 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3208 3211 else:
3209 3212 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3210 3213 originalchunks = patch.parsepatch(diff)
3211 3214
3212 3215 try:
3213 3216
3214 3217 chunks, opts = recordfilter(repo.ui, originalchunks)
3215 3218 if reversehunks:
3216 3219 chunks = patch.reversehunks(chunks)
3217 3220
3218 3221 except patch.PatchError as err:
3219 3222 raise error.Abort(_('error parsing patch: %s') % err)
3220 3223
3221 3224 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3222 3225 # Apply changes
3223 3226 fp = cStringIO.StringIO()
3224 3227 for c in chunks:
3225 3228 c.write(fp)
3226 3229 dopatch = fp.tell()
3227 3230 fp.seek(0)
3228 3231 if dopatch:
3229 3232 try:
3230 3233 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3231 3234 except patch.PatchError as err:
3232 3235 raise error.Abort(str(err))
3233 3236 del fp
3234 3237 else:
3235 3238 for f in actions['revert'][0]:
3236 3239 checkout(f)
3237 3240 if normal:
3238 3241 normal(f)
3239 3242
3240 3243 for f in actions['add'][0]:
3241 3244 # Don't check out modified files; they are already created by the diff
3242 3245 if f not in newlyaddedandmodifiedfiles:
3243 3246 checkout(f)
3244 3247 repo.dirstate.add(f)
3245 3248
3246 3249 normal = repo.dirstate.normallookup
3247 3250 if node == parent and p2 == nullid:
3248 3251 normal = repo.dirstate.normal
3249 3252 for f in actions['undelete'][0]:
3250 3253 checkout(f)
3251 3254 normal(f)
3252 3255
3253 3256 copied = copies.pathcopies(repo[parent], ctx)
3254 3257
3255 3258 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3256 3259 if f in copied:
3257 3260 repo.dirstate.copy(copied[f], f)
3258 3261
3259 3262 def command(table):
3260 3263 """Returns a function object to be used as a decorator for making commands.
3261 3264
3262 3265 This function receives a command table as its argument. The table should
3263 3266 be a dict.
3264 3267
3265 3268 The returned function can be used as a decorator for adding commands
3266 3269 to that command table. This function accepts multiple arguments to define
3267 3270 a command.
3268 3271
3269 3272 The first argument is the command name.
3270 3273
3271 3274 The options argument is an iterable of tuples defining command arguments.
3272 3275 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3273 3276
3274 3277 The synopsis argument defines a short, one line summary of how to use the
3275 3278 command. This shows up in the help output.
3276 3279
3277 3280 The norepo argument defines whether the command does not require a
3278 3281 local repository. Most commands operate against a repository, thus the
3279 3282 default is False.
3280 3283
3281 3284 The optionalrepo argument defines whether the command optionally requires
3282 3285 a local repository.
3283 3286
3284 3287 The inferrepo argument defines whether to try to find a repository from the
3285 3288 command line arguments. If True, arguments will be examined for potential
3286 3289 repository locations. See ``findrepo()``. If a repository is found, it
3287 3290 will be used.
3288 3291 """
3289 3292 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3290 3293 inferrepo=False):
3291 3294 def decorator(func):
3292 3295 if synopsis:
3293 3296 table[name] = func, list(options), synopsis
3294 3297 else:
3295 3298 table[name] = func, list(options)
3296 3299
3297 3300 if norepo:
3298 3301 # Avoid import cycle.
3299 3302 import commands
3300 3303 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3301 3304
3302 3305 if optionalrepo:
3303 3306 import commands
3304 3307 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3305 3308
3306 3309 if inferrepo:
3307 3310 import commands
3308 3311 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3309 3312
3310 3313 return func
3311 3314 return decorator
3312 3315
3313 3316 return cmd
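A hedged usage sketch of the decorator factory above, as an extension would typically use it; the command name, option, and behaviour are invented for illustration.

    from mercurial import cmdutil
    from mercurial.i18n import _

    cmdtable = {}
    command = cmdutil.command(cmdtable)

    @command('hello-world|hello',
             [('g', 'greeting', 'Hello', _('greeting to print'))],
             _('hg hello-world [-g TEXT]'),
             norepo=True)
    def helloworld(ui, **opts):
        """print a friendly greeting (illustrative command only)"""
        ui.write('%s, world!\n' % opts.get('greeting'))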
3314 3317
3315 3318 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3316 3319 # commands.outgoing. "missing" is "missing" of the result of
3317 3320 # "findcommonoutgoing()"
3318 3321 outgoinghooks = util.hooks()
3319 3322
3320 3323 # a list of (ui, repo) functions called by commands.summary
3321 3324 summaryhooks = util.hooks()
3322 3325
3323 3326 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3324 3327 #
3325 3328 # if 'changes' is None, functions should return the tuple of booleans below:
3326 3329 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3327 3330 #
3328 3331 # otherwise, 'changes' is a tuple of tuples below:
3329 3332 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3330 3333 # - (desturl, destbranch, destpeer, outgoing)
3331 3334 summaryremotehooks = util.hooks()
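A hedged sketch of a hook following the contract described in the comment above; the extension name and the message it prints are invented, and registration is shown the way util.hooks is normally used from an extension.

    def _mysummaryhook(ui, repo, opts, changes):
        if changes is None:
            # first pass: (need incoming info?, need outgoing info?)
            return False, True
        (srcurl, srcbranch, srcpeer, incoming), \
            (dsturl, dstbranch, dstpeer, outgoing) = changes
        if outgoing:
            ui.status('myext: %d changesets not yet pushed\n' % len(outgoing.missing))

    # e.g. from an extension's uisetup():
    #     cmdutil.summaryremotehooks.add('myext', _mysummaryhook)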
3332 3335
3333 3336 # A list of state files kept by multistep operations like graft.
3334 3337 # Since graft cannot be aborted, it is considered 'clearable' by update.
3335 3338 # note: bisect is intentionally excluded
3336 3339 # (state file, clearable, allowcommit, error, hint)
3337 3340 unfinishedstates = [
3338 3341 ('graftstate', True, False, _('graft in progress'),
3339 3342 _("use 'hg graft --continue' or 'hg update' to abort")),
3340 3343 ('updatestate', True, False, _('last update was interrupted'),
3341 3344 _("use 'hg update' to get a consistent checkout"))
3342 3345 ]
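A hedged sketch of how an extension with its own multistep state file typically appends to this list (the rebase extension follows this pattern); the state file name, command name, and messages are illustrative.

    from mercurial import cmdutil
    from mercurial.i18n import _

    def uisetup(ui):
        # second field False: not clearable by 'hg update';
        # third field False: committing is refused while the state file exists
        cmdutil.unfinishedstates.append(
            ['fooextstate', False, False, _('fooext operation in progress'),
             _("use 'hg fooext --continue' or 'hg fooext --abort'")])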
3343 3346
3344 3347 def checkunfinished(repo, commit=False):
3345 3348 '''Look for an unfinished multistep operation, like graft, and abort
3346 3349 if found. It's probably good to check this right before
3347 3350 bailifchanged().
3348 3351 '''
3349 3352 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3350 3353 if commit and allowcommit:
3351 3354 continue
3352 3355 if repo.vfs.exists(f):
3353 3356 raise error.Abort(msg, hint=hint)
3354 3357
3355 3358 def clearunfinished(repo):
3356 3359 '''Check for unfinished operations (as above), and clear the ones
3357 3360 that are clearable.
3358 3361 '''
3359 3362 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3360 3363 if not clearable and repo.vfs.exists(f):
3361 3364 raise error.Abort(msg, hint=hint)
3362 3365 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3363 3366 if clearable and repo.vfs.exists(f):
3364 3367 util.unlink(repo.join(f))
3365 3368
3366 3369 class dirstateguard(object):
3367 3370 '''Restore the dirstate on unexpected failure.
3368 3371
3369 3372 At construction, this class does:
3370 3373
3371 3374 - write current ``repo.dirstate`` out, and
3372 3375 - save ``.hg/dirstate`` into the backup file
3373 3376
3374 3377 This restores ``.hg/dirstate`` from the backup file if ``release()``
3375 3378 is invoked before ``close()``.
3376 3379
3377 3380 If ``close()`` runs before ``release()``, this just removes the backup file.
3378 3381 '''
3379 3382
3380 3383 def __init__(self, repo, name):
3381 3384 self._repo = repo
3382 3385 self._suffix = '.backup.%s.%d' % (name, id(self))
3383 3386 repo.dirstate._savebackup(repo.currenttransaction(), self._suffix)
3384 3387 self._active = True
3385 3388 self._closed = False
3386 3389
3387 3390 def __del__(self):
3388 3391 if self._active: # still active
3389 3392 # this may occur, even if this class is used correctly:
3390 3393 # for example, releasing other resources like a transaction
3391 3394 # may raise an exception before ``dirstateguard.release`` in
3392 3395 # ``release(tr, ....)``.
3393 3396 self._abort()
3394 3397
3395 3398 def close(self):
3396 3399 if not self._active: # already inactivated
3397 3400 msg = (_("can't close already inactivated backup: dirstate%s")
3398 3401 % self._suffix)
3399 3402 raise error.Abort(msg)
3400 3403
3401 3404 self._repo.dirstate._clearbackup(self._repo.currenttransaction(),
3402 3405 self._suffix)
3403 3406 self._active = False
3404 3407 self._closed = True
3405 3408
3406 3409 def _abort(self):
3407 3410 self._repo.dirstate._restorebackup(self._repo.currenttransaction(),
3408 3411 self._suffix)
3409 3412 self._active = False
3410 3413
3411 3414 def release(self):
3412 3415 if not self._closed:
3413 3416 if not self._active: # already inactivated
3414 3417 msg = (_("can't release already inactivated backup:"
3415 3418 " dirstate%s")
3416 3419 % self._suffix)
3417 3420 raise error.Abort(msg)
3418 3421 self._abort()
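A hedged usage sketch of the guard class above, following the close-on-success / release-in-finally pattern its docstring describes; 'myoperation' and the mutation step are placeholders.

    # from outside this module: dsguard = cmdutil.dirstateguard(repo, 'myoperation')
    dsguard = dirstateguard(repo, 'myoperation')
    try:
        # ... mutate repo.dirstate here (add/remove/copy entries) ...
        dsguard.close()      # success: drop the backup, keep the new dirstate
    finally:
        # if close() was never reached, this restores .hg/dirstate from the backup
        dsguard.release()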