cmdutil.changeset_printer: pass context into showpatch()...
Author: Gregory Szorc
Changeset: r27065:93bcc73d (branch: default)
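The commit narrows changeset_printer.showpatch() from taking a bare node to taking a changectx, so the method can ask the context for its node and first parent instead of reaching back through self.repo.changelog. The sketch below only illustrates that refactoring pattern and is not Mercurial's real API: FakeChangelog, FakeCtx, showpatch_old and showpatch_new are invented stand-ins.

# Minimal sketch (stand-in classes, not mercurial internals) of the
# "pass the context, not the node" refactoring this changeset applies.

class FakeChangelog(object):
    def __init__(self, parentmap):
        self._parentmap = parentmap            # node -> (p1node, p2node)
    def parents(self, node):
        return self._parentmap[node]

class FakeCtx(object):
    """Stand-in for a changectx: it knows its own node and first parent."""
    def __init__(self, node, p1node):
        self._node = node
        self._p1node = p1node
    def node(self):
        return self._node
    def p1(self):
        return FakeCtx(self._p1node, None)

def showpatch_old(changelog, node):
    # before: the caller hands over a bare node; the first parent has to be
    # re-resolved through the changelog
    prev = changelog.parents(node)[0]
    return prev, node

def showpatch_new(ctx):
    # after: the caller hands over the context; node and parent come from it
    node = ctx.node()
    prev = ctx.p1()
    return prev.node(), node

if __name__ == '__main__':
    cl = FakeChangelog({'b1': ('a0', None)})
    assert showpatch_old(cl, 'b1') == ('a0', 'b1')
    assert showpatch_new(FakeCtx('b1', 'a0')) == ('a0', 'b1')

In the actual hunk further down, the new showpatch() keeps prev as a context (prev = ctx.p1()) and passes it straight to diffordiffstat(), so callers that already hold a ctx (as _show() does) no longer need self.repo.changelog for this step.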
@@ -1,3402 +1,3403 @@
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO, shutil
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import formatter
18 18 import crecord as crecordmod
19 19 import lock as lockmod
20 20
21 21 def ishunk(x):
22 22 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
23 23 return isinstance(x, hunkclasses)
24 24
25 25 def newandmodified(chunks, originalchunks):
26 26 newlyaddedandmodifiedfiles = set()
27 27 for chunk in chunks:
28 28 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
29 29 originalchunks:
30 30 newlyaddedandmodifiedfiles.add(chunk.header.filename())
31 31 return newlyaddedandmodifiedfiles
32 32
33 33 def parsealiases(cmd):
34 34 return cmd.lstrip("^").split("|")
35 35
36 36 def setupwrapcolorwrite(ui):
37 37 # wrap ui.write so diff output can be labeled/colorized
38 38 def wrapwrite(orig, *args, **kw):
39 39 label = kw.pop('label', '')
40 40 for chunk, l in patch.difflabel(lambda: args):
41 41 orig(chunk, label=label + l)
42 42
43 43 oldwrite = ui.write
44 44 def wrap(*args, **kwargs):
45 45 return wrapwrite(oldwrite, *args, **kwargs)
46 46 setattr(ui, 'write', wrap)
47 47 return oldwrite
48 48
49 49 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
50 50 if usecurses:
51 51 if testfile:
52 52 recordfn = crecordmod.testdecorator(testfile,
53 53 crecordmod.testchunkselector)
54 54 else:
55 55 recordfn = crecordmod.chunkselector
56 56
57 57 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
58 58
59 59 else:
60 60 return patch.filterpatch(ui, originalhunks, operation)
61 61
62 62 def recordfilter(ui, originalhunks, operation=None):
63 63 """ Prompts the user to filter the originalhunks and return a list of
64 64 selected hunks.
65 65 *operation* is used for ui purposes to indicate the user
66 66 what kind of filtering they are doing: reverting, committing, shelving, etc.
67 67 *operation* has to be a translated string.
68 68 """
69 69 usecurses = ui.configbool('experimental', 'crecord', False)
70 70 testfile = ui.config('experimental', 'crecordtest', None)
71 71 oldwrite = setupwrapcolorwrite(ui)
72 72 try:
73 73 newchunks = filterchunks(ui, originalhunks, usecurses, testfile,
74 74 operation)
75 75 finally:
76 76 ui.write = oldwrite
77 77 return newchunks
78 78
79 79 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
80 80 filterfn, *pats, **opts):
81 81 import merge as mergemod
82 82
83 83 if not ui.interactive():
84 84 if cmdsuggest:
85 85 msg = _('running non-interactively, use %s instead') % cmdsuggest
86 86 else:
87 87 msg = _('running non-interactively')
88 88 raise error.Abort(msg)
89 89
90 90 # make sure username is set before going interactive
91 91 if not opts.get('user'):
92 92 ui.username() # raise exception, username not provided
93 93
94 94 def recordfunc(ui, repo, message, match, opts):
95 95 """This is generic record driver.
96 96
97 97 Its job is to interactively filter local changes, and
98 98 accordingly prepare working directory into a state in which the
99 99 job can be delegated to a non-interactive commit command such as
100 100 'commit' or 'qrefresh'.
101 101
102 102 After the actual job is done by non-interactive command, the
103 103 working directory is restored to its original state.
104 104
105 105 In the end we'll record interesting changes, and everything else
106 106 will be left in place, so the user can continue working.
107 107 """
108 108
109 109 checkunfinished(repo, commit=True)
110 110 merge = len(repo[None].parents()) > 1
111 111 if merge:
112 112 raise error.Abort(_('cannot partially commit a merge '
113 113 '(use "hg commit" instead)'))
114 114
115 115 status = repo.status(match=match)
116 116 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
117 117 diffopts.nodates = True
118 118 diffopts.git = True
119 119 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
120 120 originalchunks = patch.parsepatch(originaldiff)
121 121
122 122 # 1. filter patch, so we have intending-to apply subset of it
123 123 try:
124 124 chunks = filterfn(ui, originalchunks)
125 125 except patch.PatchError as err:
126 126 raise error.Abort(_('error parsing patch: %s') % err)
127 127
128 128 # We need to keep a backup of files that have been newly added and
129 129 # modified during the recording process because there is a previous
130 130 # version without the edit in the workdir
131 131 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
132 132 contenders = set()
133 133 for h in chunks:
134 134 try:
135 135 contenders.update(set(h.files()))
136 136 except AttributeError:
137 137 pass
138 138
139 139 changed = status.modified + status.added + status.removed
140 140 newfiles = [f for f in changed if f in contenders]
141 141 if not newfiles:
142 142 ui.status(_('no changes to record\n'))
143 143 return 0
144 144
145 145 modified = set(status.modified)
146 146
147 147 # 2. backup changed files, so we can restore them in the end
148 148
149 149 if backupall:
150 150 tobackup = changed
151 151 else:
152 152 tobackup = [f for f in newfiles if f in modified or f in \
153 153 newlyaddedandmodifiedfiles]
154 154 backups = {}
155 155 if tobackup:
156 156 backupdir = repo.join('record-backups')
157 157 try:
158 158 os.mkdir(backupdir)
159 159 except OSError as err:
160 160 if err.errno != errno.EEXIST:
161 161 raise
162 162 try:
163 163 # backup continues
164 164 for f in tobackup:
165 165 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
166 166 dir=backupdir)
167 167 os.close(fd)
168 168 ui.debug('backup %r as %r\n' % (f, tmpname))
169 169 util.copyfile(repo.wjoin(f), tmpname)
170 170 shutil.copystat(repo.wjoin(f), tmpname)
171 171 backups[f] = tmpname
172 172
173 173 fp = cStringIO.StringIO()
174 174 for c in chunks:
175 175 fname = c.filename()
176 176 if fname in backups:
177 177 c.write(fp)
178 178 dopatch = fp.tell()
179 179 fp.seek(0)
180 180
181 181 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
182 182 # 3a. apply filtered patch to clean repo (clean)
183 183 if backups:
184 184 # Equivalent to hg.revert
185 185 choices = lambda key: key in backups
186 186 mergemod.update(repo, repo.dirstate.p1(),
187 187 False, True, choices)
188 188
189 189 # 3b. (apply)
190 190 if dopatch:
191 191 try:
192 192 ui.debug('applying patch\n')
193 193 ui.debug(fp.getvalue())
194 194 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
195 195 except patch.PatchError as err:
196 196 raise error.Abort(str(err))
197 197 del fp
198 198
199 199 # 4. We prepared working directory according to filtered
200 200 # patch. Now is the time to delegate the job to
201 201 # commit/qrefresh or the like!
202 202
203 203 # Make all of the pathnames absolute.
204 204 newfiles = [repo.wjoin(nf) for nf in newfiles]
205 205 return commitfunc(ui, repo, *newfiles, **opts)
206 206 finally:
207 207 # 5. finally restore backed-up files
208 208 try:
209 209 dirstate = repo.dirstate
210 210 for realname, tmpname in backups.iteritems():
211 211 ui.debug('restoring %r to %r\n' % (tmpname, realname))
212 212
213 213 if dirstate[realname] == 'n':
214 214 # without normallookup, restoring timestamp
215 215 # may cause partially committed files
216 216 # to be treated as unmodified
217 217 dirstate.normallookup(realname)
218 218
219 219 util.copyfile(tmpname, repo.wjoin(realname))
220 220 # Our calls to copystat() here and above are a
221 221 # hack to trick any editors that have f open that
222 222 # we haven't modified them.
223 223 #
224 224 # Also note that this racy as an editor could
225 225 # notice the file's mtime before we've finished
226 226 # writing it.
227 227 shutil.copystat(tmpname, repo.wjoin(realname))
228 228 os.unlink(tmpname)
229 229 if tobackup:
230 230 os.rmdir(backupdir)
231 231 except OSError:
232 232 pass
233 233
234 234 def recordinwlock(ui, repo, message, match, opts):
235 235 wlock = repo.wlock()
236 236 try:
237 237 return recordfunc(ui, repo, message, match, opts)
238 238 finally:
239 239 wlock.release()
240 240
241 241 return commit(ui, repo, recordinwlock, pats, opts)
242 242
243 243 def findpossible(cmd, table, strict=False):
244 244 """
245 245 Return cmd -> (aliases, command table entry)
246 246 for each matching command.
247 247 Return debug commands (or their aliases) only if no normal command matches.
248 248 """
249 249 choice = {}
250 250 debugchoice = {}
251 251
252 252 if cmd in table:
253 253 # short-circuit exact matches, "log" alias beats "^log|history"
254 254 keys = [cmd]
255 255 else:
256 256 keys = table.keys()
257 257
258 258 allcmds = []
259 259 for e in keys:
260 260 aliases = parsealiases(e)
261 261 allcmds.extend(aliases)
262 262 found = None
263 263 if cmd in aliases:
264 264 found = cmd
265 265 elif not strict:
266 266 for a in aliases:
267 267 if a.startswith(cmd):
268 268 found = a
269 269 break
270 270 if found is not None:
271 271 if aliases[0].startswith("debug") or found.startswith("debug"):
272 272 debugchoice[found] = (aliases, table[e])
273 273 else:
274 274 choice[found] = (aliases, table[e])
275 275
276 276 if not choice and debugchoice:
277 277 choice = debugchoice
278 278
279 279 return choice, allcmds
280 280
281 281 def findcmd(cmd, table, strict=True):
282 282 """Return (aliases, command table entry) for command string."""
283 283 choice, allcmds = findpossible(cmd, table, strict)
284 284
285 285 if cmd in choice:
286 286 return choice[cmd]
287 287
288 288 if len(choice) > 1:
289 289 clist = choice.keys()
290 290 clist.sort()
291 291 raise error.AmbiguousCommand(cmd, clist)
292 292
293 293 if choice:
294 294 return choice.values()[0]
295 295
296 296 raise error.UnknownCommand(cmd, allcmds)
297 297
298 298 def findrepo(p):
299 299 while not os.path.isdir(os.path.join(p, ".hg")):
300 300 oldp, p = p, os.path.dirname(p)
301 301 if p == oldp:
302 302 return None
303 303
304 304 return p
305 305
306 306 def bailifchanged(repo, merge=True):
307 307 if merge and repo.dirstate.p2() != nullid:
308 308 raise error.Abort(_('outstanding uncommitted merge'))
309 309 modified, added, removed, deleted = repo.status()[:4]
310 310 if modified or added or removed or deleted:
311 311 raise error.Abort(_('uncommitted changes'))
312 312 ctx = repo[None]
313 313 for s in sorted(ctx.substate):
314 314 ctx.sub(s).bailifchanged()
315 315
316 316 def logmessage(ui, opts):
317 317 """ get the log message according to -m and -l option """
318 318 message = opts.get('message')
319 319 logfile = opts.get('logfile')
320 320
321 321 if message and logfile:
322 322 raise error.Abort(_('options --message and --logfile are mutually '
323 323 'exclusive'))
324 324 if not message and logfile:
325 325 try:
326 326 if logfile == '-':
327 327 message = ui.fin.read()
328 328 else:
329 329 message = '\n'.join(util.readfile(logfile).splitlines())
330 330 except IOError as inst:
331 331 raise error.Abort(_("can't read commit message '%s': %s") %
332 332 (logfile, inst.strerror))
333 333 return message
334 334
335 335 def mergeeditform(ctxorbool, baseformname):
336 336 """return appropriate editform name (referencing a committemplate)
337 337
338 338 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
339 339 merging is committed.
340 340
341 341 This returns baseformname with '.merge' appended if it is a merge,
342 342 otherwise '.normal' is appended.
343 343 """
344 344 if isinstance(ctxorbool, bool):
345 345 if ctxorbool:
346 346 return baseformname + ".merge"
347 347 elif 1 < len(ctxorbool.parents()):
348 348 return baseformname + ".merge"
349 349
350 350 return baseformname + ".normal"
351 351
352 352 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
353 353 editform='', **opts):
354 354 """get appropriate commit message editor according to '--edit' option
355 355
356 356 'finishdesc' is a function to be called with edited commit message
357 357 (= 'description' of the new changeset) just after editing, but
358 358 before checking empty-ness. It should return actual text to be
359 359 stored into history. This allows to change description before
360 360 storing.
361 361
362 362 'extramsg' is a extra message to be shown in the editor instead of
363 363 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
364 364 is automatically added.
365 365
366 366 'editform' is a dot-separated list of names, to distinguish
367 367 the purpose of commit text editing.
368 368
369 369 'getcommiteditor' returns 'commitforceeditor' regardless of
370 370 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
371 371 they are specific for usage in MQ.
372 372 """
373 373 if edit or finishdesc or extramsg:
374 374 return lambda r, c, s: commitforceeditor(r, c, s,
375 375 finishdesc=finishdesc,
376 376 extramsg=extramsg,
377 377 editform=editform)
378 378 elif editform:
379 379 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
380 380 else:
381 381 return commiteditor
382 382
383 383 def loglimit(opts):
384 384 """get the log limit according to option -l/--limit"""
385 385 limit = opts.get('limit')
386 386 if limit:
387 387 try:
388 388 limit = int(limit)
389 389 except ValueError:
390 390 raise error.Abort(_('limit must be a positive integer'))
391 391 if limit <= 0:
392 392 raise error.Abort(_('limit must be positive'))
393 393 else:
394 394 limit = None
395 395 return limit
396 396
397 397 def makefilename(repo, pat, node, desc=None,
398 398 total=None, seqno=None, revwidth=None, pathname=None):
399 399 node_expander = {
400 400 'H': lambda: hex(node),
401 401 'R': lambda: str(repo.changelog.rev(node)),
402 402 'h': lambda: short(node),
403 403 'm': lambda: re.sub('[^\w]', '_', str(desc))
404 404 }
405 405 expander = {
406 406 '%': lambda: '%',
407 407 'b': lambda: os.path.basename(repo.root),
408 408 }
409 409
410 410 try:
411 411 if node:
412 412 expander.update(node_expander)
413 413 if node:
414 414 expander['r'] = (lambda:
415 415 str(repo.changelog.rev(node)).zfill(revwidth or 0))
416 416 if total is not None:
417 417 expander['N'] = lambda: str(total)
418 418 if seqno is not None:
419 419 expander['n'] = lambda: str(seqno)
420 420 if total is not None and seqno is not None:
421 421 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
422 422 if pathname is not None:
423 423 expander['s'] = lambda: os.path.basename(pathname)
424 424 expander['d'] = lambda: os.path.dirname(pathname) or '.'
425 425 expander['p'] = lambda: pathname
426 426
427 427 newname = []
428 428 patlen = len(pat)
429 429 i = 0
430 430 while i < patlen:
431 431 c = pat[i]
432 432 if c == '%':
433 433 i += 1
434 434 c = pat[i]
435 435 c = expander[c]()
436 436 newname.append(c)
437 437 i += 1
438 438 return ''.join(newname)
439 439 except KeyError as inst:
440 440 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
441 441 inst.args[0])
442 442
443 443 def makefileobj(repo, pat, node=None, desc=None, total=None,
444 444 seqno=None, revwidth=None, mode='wb', modemap=None,
445 445 pathname=None):
446 446
447 447 writable = mode not in ('r', 'rb')
448 448
449 449 if not pat or pat == '-':
450 450 if writable:
451 451 fp = repo.ui.fout
452 452 else:
453 453 fp = repo.ui.fin
454 454 if util.safehasattr(fp, 'fileno'):
455 455 return os.fdopen(os.dup(fp.fileno()), mode)
456 456 else:
457 457 # if this fp can't be duped properly, return
458 458 # a dummy object that can be closed
459 459 class wrappedfileobj(object):
460 460 noop = lambda x: None
461 461 def __init__(self, f):
462 462 self.f = f
463 463 def __getattr__(self, attr):
464 464 if attr == 'close':
465 465 return self.noop
466 466 else:
467 467 return getattr(self.f, attr)
468 468
469 469 return wrappedfileobj(fp)
470 470 if util.safehasattr(pat, 'write') and writable:
471 471 return pat
472 472 if util.safehasattr(pat, 'read') and 'r' in mode:
473 473 return pat
474 474 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
475 475 if modemap is not None:
476 476 mode = modemap.get(fn, mode)
477 477 if mode == 'wb':
478 478 modemap[fn] = 'ab'
479 479 return open(fn, mode)
480 480
481 481 def openrevlog(repo, cmd, file_, opts):
482 482 """opens the changelog, manifest, a filelog or a given revlog"""
483 483 cl = opts['changelog']
484 484 mf = opts['manifest']
485 485 dir = opts['dir']
486 486 msg = None
487 487 if cl and mf:
488 488 msg = _('cannot specify --changelog and --manifest at the same time')
489 489 elif cl and dir:
490 490 msg = _('cannot specify --changelog and --dir at the same time')
491 491 elif cl or mf:
492 492 if file_:
493 493 msg = _('cannot specify filename with --changelog or --manifest')
494 494 elif not repo:
495 495 msg = _('cannot specify --changelog or --manifest or --dir '
496 496 'without a repository')
497 497 if msg:
498 498 raise error.Abort(msg)
499 499
500 500 r = None
501 501 if repo:
502 502 if cl:
503 503 r = repo.unfiltered().changelog
504 504 elif dir:
505 505 if 'treemanifest' not in repo.requirements:
506 506 raise error.Abort(_("--dir can only be used on repos with "
507 507 "treemanifest enabled"))
508 508 dirlog = repo.dirlog(file_)
509 509 if len(dirlog):
510 510 r = dirlog
511 511 elif mf:
512 512 r = repo.manifest
513 513 elif file_:
514 514 filelog = repo.file(file_)
515 515 if len(filelog):
516 516 r = filelog
517 517 if not r:
518 518 if not file_:
519 519 raise error.CommandError(cmd, _('invalid arguments'))
520 520 if not os.path.isfile(file_):
521 521 raise error.Abort(_("revlog '%s' not found") % file_)
522 522 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
523 523 file_[:-2] + ".i")
524 524 return r
525 525
526 526 def copy(ui, repo, pats, opts, rename=False):
527 527 # called with the repo lock held
528 528 #
529 529 # hgsep => pathname that uses "/" to separate directories
530 530 # ossep => pathname that uses os.sep to separate directories
531 531 cwd = repo.getcwd()
532 532 targets = {}
533 533 after = opts.get("after")
534 534 dryrun = opts.get("dry_run")
535 535 wctx = repo[None]
536 536
537 537 def walkpat(pat):
538 538 srcs = []
539 539 if after:
540 540 badstates = '?'
541 541 else:
542 542 badstates = '?r'
543 543 m = scmutil.match(repo[None], [pat], opts, globbed=True)
544 544 for abs in repo.walk(m):
545 545 state = repo.dirstate[abs]
546 546 rel = m.rel(abs)
547 547 exact = m.exact(abs)
548 548 if state in badstates:
549 549 if exact and state == '?':
550 550 ui.warn(_('%s: not copying - file is not managed\n') % rel)
551 551 if exact and state == 'r':
552 552 ui.warn(_('%s: not copying - file has been marked for'
553 553 ' remove\n') % rel)
554 554 continue
555 555 # abs: hgsep
556 556 # rel: ossep
557 557 srcs.append((abs, rel, exact))
558 558 return srcs
559 559
560 560 # abssrc: hgsep
561 561 # relsrc: ossep
562 562 # otarget: ossep
563 563 def copyfile(abssrc, relsrc, otarget, exact):
564 564 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
565 565 if '/' in abstarget:
566 566 # We cannot normalize abstarget itself, this would prevent
567 567 # case only renames, like a => A.
568 568 abspath, absname = abstarget.rsplit('/', 1)
569 569 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
570 570 reltarget = repo.pathto(abstarget, cwd)
571 571 target = repo.wjoin(abstarget)
572 572 src = repo.wjoin(abssrc)
573 573 state = repo.dirstate[abstarget]
574 574
575 575 scmutil.checkportable(ui, abstarget)
576 576
577 577 # check for collisions
578 578 prevsrc = targets.get(abstarget)
579 579 if prevsrc is not None:
580 580 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
581 581 (reltarget, repo.pathto(abssrc, cwd),
582 582 repo.pathto(prevsrc, cwd)))
583 583 return
584 584
585 585 # check for overwrites
586 586 exists = os.path.lexists(target)
587 587 samefile = False
588 588 if exists and abssrc != abstarget:
589 589 if (repo.dirstate.normalize(abssrc) ==
590 590 repo.dirstate.normalize(abstarget)):
591 591 if not rename:
592 592 ui.warn(_("%s: can't copy - same file\n") % reltarget)
593 593 return
594 594 exists = False
595 595 samefile = True
596 596
597 597 if not after and exists or after and state in 'mn':
598 598 if not opts['force']:
599 599 ui.warn(_('%s: not overwriting - file exists\n') %
600 600 reltarget)
601 601 return
602 602
603 603 if after:
604 604 if not exists:
605 605 if rename:
606 606 ui.warn(_('%s: not recording move - %s does not exist\n') %
607 607 (relsrc, reltarget))
608 608 else:
609 609 ui.warn(_('%s: not recording copy - %s does not exist\n') %
610 610 (relsrc, reltarget))
611 611 return
612 612 elif not dryrun:
613 613 try:
614 614 if exists:
615 615 os.unlink(target)
616 616 targetdir = os.path.dirname(target) or '.'
617 617 if not os.path.isdir(targetdir):
618 618 os.makedirs(targetdir)
619 619 if samefile:
620 620 tmp = target + "~hgrename"
621 621 os.rename(src, tmp)
622 622 os.rename(tmp, target)
623 623 else:
624 624 util.copyfile(src, target)
625 625 srcexists = True
626 626 except IOError as inst:
627 627 if inst.errno == errno.ENOENT:
628 628 ui.warn(_('%s: deleted in working directory\n') % relsrc)
629 629 srcexists = False
630 630 else:
631 631 ui.warn(_('%s: cannot copy - %s\n') %
632 632 (relsrc, inst.strerror))
633 633 return True # report a failure
634 634
635 635 if ui.verbose or not exact:
636 636 if rename:
637 637 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
638 638 else:
639 639 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
640 640
641 641 targets[abstarget] = abssrc
642 642
643 643 # fix up dirstate
644 644 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
645 645 dryrun=dryrun, cwd=cwd)
646 646 if rename and not dryrun:
647 647 if not after and srcexists and not samefile:
648 648 util.unlinkpath(repo.wjoin(abssrc))
649 649 wctx.forget([abssrc])
650 650
651 651 # pat: ossep
652 652 # dest ossep
653 653 # srcs: list of (hgsep, hgsep, ossep, bool)
654 654 # return: function that takes hgsep and returns ossep
655 655 def targetpathfn(pat, dest, srcs):
656 656 if os.path.isdir(pat):
657 657 abspfx = pathutil.canonpath(repo.root, cwd, pat)
658 658 abspfx = util.localpath(abspfx)
659 659 if destdirexists:
660 660 striplen = len(os.path.split(abspfx)[0])
661 661 else:
662 662 striplen = len(abspfx)
663 663 if striplen:
664 664 striplen += len(os.sep)
665 665 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
666 666 elif destdirexists:
667 667 res = lambda p: os.path.join(dest,
668 668 os.path.basename(util.localpath(p)))
669 669 else:
670 670 res = lambda p: dest
671 671 return res
672 672
673 673 # pat: ossep
674 674 # dest ossep
675 675 # srcs: list of (hgsep, hgsep, ossep, bool)
676 676 # return: function that takes hgsep and returns ossep
677 677 def targetpathafterfn(pat, dest, srcs):
678 678 if matchmod.patkind(pat):
679 679 # a mercurial pattern
680 680 res = lambda p: os.path.join(dest,
681 681 os.path.basename(util.localpath(p)))
682 682 else:
683 683 abspfx = pathutil.canonpath(repo.root, cwd, pat)
684 684 if len(abspfx) < len(srcs[0][0]):
685 685 # A directory. Either the target path contains the last
686 686 # component of the source path or it does not.
687 687 def evalpath(striplen):
688 688 score = 0
689 689 for s in srcs:
690 690 t = os.path.join(dest, util.localpath(s[0])[striplen:])
691 691 if os.path.lexists(t):
692 692 score += 1
693 693 return score
694 694
695 695 abspfx = util.localpath(abspfx)
696 696 striplen = len(abspfx)
697 697 if striplen:
698 698 striplen += len(os.sep)
699 699 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
700 700 score = evalpath(striplen)
701 701 striplen1 = len(os.path.split(abspfx)[0])
702 702 if striplen1:
703 703 striplen1 += len(os.sep)
704 704 if evalpath(striplen1) > score:
705 705 striplen = striplen1
706 706 res = lambda p: os.path.join(dest,
707 707 util.localpath(p)[striplen:])
708 708 else:
709 709 # a file
710 710 if destdirexists:
711 711 res = lambda p: os.path.join(dest,
712 712 os.path.basename(util.localpath(p)))
713 713 else:
714 714 res = lambda p: dest
715 715 return res
716 716
717 717 pats = scmutil.expandpats(pats)
718 718 if not pats:
719 719 raise error.Abort(_('no source or destination specified'))
720 720 if len(pats) == 1:
721 721 raise error.Abort(_('no destination specified'))
722 722 dest = pats.pop()
723 723 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
724 724 if not destdirexists:
725 725 if len(pats) > 1 or matchmod.patkind(pats[0]):
726 726 raise error.Abort(_('with multiple sources, destination must be an '
727 727 'existing directory'))
728 728 if util.endswithsep(dest):
729 729 raise error.Abort(_('destination %s is not a directory') % dest)
730 730
731 731 tfn = targetpathfn
732 732 if after:
733 733 tfn = targetpathafterfn
734 734 copylist = []
735 735 for pat in pats:
736 736 srcs = walkpat(pat)
737 737 if not srcs:
738 738 continue
739 739 copylist.append((tfn(pat, dest, srcs), srcs))
740 740 if not copylist:
741 741 raise error.Abort(_('no files to copy'))
742 742
743 743 errors = 0
744 744 for targetpath, srcs in copylist:
745 745 for abssrc, relsrc, exact in srcs:
746 746 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
747 747 errors += 1
748 748
749 749 if errors:
750 750 ui.warn(_('(consider using --after)\n'))
751 751
752 752 return errors != 0
753 753
754 754 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
755 755 runargs=None, appendpid=False):
756 756 '''Run a command as a service.'''
757 757
758 758 def writepid(pid):
759 759 if opts['pid_file']:
760 760 if appendpid:
761 761 mode = 'a'
762 762 else:
763 763 mode = 'w'
764 764 fp = open(opts['pid_file'], mode)
765 765 fp.write(str(pid) + '\n')
766 766 fp.close()
767 767
768 768 if opts['daemon'] and not opts['daemon_pipefds']:
769 769 # Signal child process startup with file removal
770 770 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
771 771 os.close(lockfd)
772 772 try:
773 773 if not runargs:
774 774 runargs = util.hgcmd() + sys.argv[1:]
775 775 runargs.append('--daemon-pipefds=%s' % lockpath)
776 776 # Don't pass --cwd to the child process, because we've already
777 777 # changed directory.
778 778 for i in xrange(1, len(runargs)):
779 779 if runargs[i].startswith('--cwd='):
780 780 del runargs[i]
781 781 break
782 782 elif runargs[i].startswith('--cwd'):
783 783 del runargs[i:i + 2]
784 784 break
785 785 def condfn():
786 786 return not os.path.exists(lockpath)
787 787 pid = util.rundetached(runargs, condfn)
788 788 if pid < 0:
789 789 raise error.Abort(_('child process failed to start'))
790 790 writepid(pid)
791 791 finally:
792 792 try:
793 793 os.unlink(lockpath)
794 794 except OSError as e:
795 795 if e.errno != errno.ENOENT:
796 796 raise
797 797 if parentfn:
798 798 return parentfn(pid)
799 799 else:
800 800 return
801 801
802 802 if initfn:
803 803 initfn()
804 804
805 805 if not opts['daemon']:
806 806 writepid(os.getpid())
807 807
808 808 if opts['daemon_pipefds']:
809 809 lockpath = opts['daemon_pipefds']
810 810 try:
811 811 os.setsid()
812 812 except AttributeError:
813 813 pass
814 814 os.unlink(lockpath)
815 815 util.hidewindow()
816 816 sys.stdout.flush()
817 817 sys.stderr.flush()
818 818
819 819 nullfd = os.open(os.devnull, os.O_RDWR)
820 820 logfilefd = nullfd
821 821 if logfile:
822 822 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
823 823 os.dup2(nullfd, 0)
824 824 os.dup2(logfilefd, 1)
825 825 os.dup2(logfilefd, 2)
826 826 if nullfd not in (0, 1, 2):
827 827 os.close(nullfd)
828 828 if logfile and logfilefd not in (0, 1, 2):
829 829 os.close(logfilefd)
830 830
831 831 if runfn:
832 832 return runfn()
833 833
834 834 ## facility to let extension process additional data into an import patch
835 835 # list of identifier to be executed in order
836 836 extrapreimport = [] # run before commit
837 837 extrapostimport = [] # run after commit
838 838 # mapping from identifier to actual import function
839 839 #
840 840 # 'preimport' are run before the commit is made and are provided the following
841 841 # arguments:
842 842 # - repo: the localrepository instance,
843 843 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
844 844 # - extra: the future extra dictionary of the changeset, please mutate it,
845 845 # - opts: the import options.
846 846 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
847 847 # mutation of in memory commit and more. Feel free to rework the code to get
848 848 # there.
849 849 extrapreimportmap = {}
850 850 # 'postimport' are run after the commit is made and are provided the following
851 851 # argument:
852 852 # - ctx: the changectx created by import.
853 853 extrapostimportmap = {}
854 854
855 855 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
856 856 """Utility function used by commands.import to import a single patch
857 857
858 858 This function is explicitly defined here to help the evolve extension to
859 859 wrap this part of the import logic.
860 860
861 861 The API is currently a bit ugly because it a simple code translation from
862 862 the import command. Feel free to make it better.
863 863
864 864 :hunk: a patch (as a binary string)
865 865 :parents: nodes that will be parent of the created commit
866 866 :opts: the full dict of option passed to the import command
867 867 :msgs: list to save commit message to.
868 868 (used in case we need to save it when failing)
869 869 :updatefunc: a function that update a repo to a given node
870 870 updatefunc(<repo>, <node>)
871 871 """
872 872 # avoid cycle context -> subrepo -> cmdutil
873 873 import context
874 874 extractdata = patch.extract(ui, hunk)
875 875 tmpname = extractdata.get('filename')
876 876 message = extractdata.get('message')
877 877 user = extractdata.get('user')
878 878 date = extractdata.get('date')
879 879 branch = extractdata.get('branch')
880 880 nodeid = extractdata.get('nodeid')
881 881 p1 = extractdata.get('p1')
882 882 p2 = extractdata.get('p2')
883 883
884 884 update = not opts.get('bypass')
885 885 strip = opts["strip"]
886 886 prefix = opts["prefix"]
887 887 sim = float(opts.get('similarity') or 0)
888 888 if not tmpname:
889 889 return (None, None, False)
890 890 msg = _('applied to working directory')
891 891
892 892 rejects = False
893 893
894 894 try:
895 895 cmdline_message = logmessage(ui, opts)
896 896 if cmdline_message:
897 897 # pickup the cmdline msg
898 898 message = cmdline_message
899 899 elif message:
900 900 # pickup the patch msg
901 901 message = message.strip()
902 902 else:
903 903 # launch the editor
904 904 message = None
905 905 ui.debug('message:\n%s\n' % message)
906 906
907 907 if len(parents) == 1:
908 908 parents.append(repo[nullid])
909 909 if opts.get('exact'):
910 910 if not nodeid or not p1:
911 911 raise error.Abort(_('not a Mercurial patch'))
912 912 p1 = repo[p1]
913 913 p2 = repo[p2 or nullid]
914 914 elif p2:
915 915 try:
916 916 p1 = repo[p1]
917 917 p2 = repo[p2]
918 918 # Without any options, consider p2 only if the
919 919 # patch is being applied on top of the recorded
920 920 # first parent.
921 921 if p1 != parents[0]:
922 922 p1 = parents[0]
923 923 p2 = repo[nullid]
924 924 except error.RepoError:
925 925 p1, p2 = parents
926 926 if p2.node() == nullid:
927 927 ui.warn(_("warning: import the patch as a normal revision\n"
928 928 "(use --exact to import the patch as a merge)\n"))
929 929 else:
930 930 p1, p2 = parents
931 931
932 932 n = None
933 933 if update:
934 934 if p1 != parents[0]:
935 935 updatefunc(repo, p1.node())
936 936 if p2 != parents[1]:
937 937 repo.setparents(p1.node(), p2.node())
938 938
939 939 if opts.get('exact') or opts.get('import_branch'):
940 940 repo.dirstate.setbranch(branch or 'default')
941 941
942 942 partial = opts.get('partial', False)
943 943 files = set()
944 944 try:
945 945 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
946 946 files=files, eolmode=None, similarity=sim / 100.0)
947 947 except patch.PatchError as e:
948 948 if not partial:
949 949 raise error.Abort(str(e))
950 950 if partial:
951 951 rejects = True
952 952
953 953 files = list(files)
954 954 if opts.get('no_commit'):
955 955 if message:
956 956 msgs.append(message)
957 957 else:
958 958 if opts.get('exact') or p2:
959 959 # If you got here, you either use --force and know what
960 960 # you are doing or used --exact or a merge patch while
961 961 # being updated to its first parent.
962 962 m = None
963 963 else:
964 964 m = scmutil.matchfiles(repo, files or [])
965 965 editform = mergeeditform(repo[None], 'import.normal')
966 966 if opts.get('exact'):
967 967 editor = None
968 968 else:
969 969 editor = getcommiteditor(editform=editform, **opts)
970 970 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
971 971 extra = {}
972 972 for idfunc in extrapreimport:
973 973 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
974 974 try:
975 975 if partial:
976 976 repo.ui.setconfig('ui', 'allowemptycommit', True)
977 977 n = repo.commit(message, opts.get('user') or user,
978 978 opts.get('date') or date, match=m,
979 979 editor=editor, extra=extra)
980 980 for idfunc in extrapostimport:
981 981 extrapostimportmap[idfunc](repo[n])
982 982 finally:
983 983 repo.ui.restoreconfig(allowemptyback)
984 984 else:
985 985 if opts.get('exact') or opts.get('import_branch'):
986 986 branch = branch or 'default'
987 987 else:
988 988 branch = p1.branch()
989 989 store = patch.filestore()
990 990 try:
991 991 files = set()
992 992 try:
993 993 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
994 994 files, eolmode=None)
995 995 except patch.PatchError as e:
996 996 raise error.Abort(str(e))
997 997 if opts.get('exact'):
998 998 editor = None
999 999 else:
1000 1000 editor = getcommiteditor(editform='import.bypass')
1001 1001 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1002 1002 message,
1003 1003 opts.get('user') or user,
1004 1004 opts.get('date') or date,
1005 1005 branch, files, store,
1006 1006 editor=editor)
1007 1007 n = memctx.commit()
1008 1008 finally:
1009 1009 store.close()
1010 1010 if opts.get('exact') and opts.get('no_commit'):
1011 1011 # --exact with --no-commit is still useful in that it does merge
1012 1012 # and branch bits
1013 1013 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1014 1014 elif opts.get('exact') and hex(n) != nodeid:
1015 1015 raise error.Abort(_('patch is damaged or loses information'))
1016 1016 if n:
1017 1017 # i18n: refers to a short changeset id
1018 1018 msg = _('created %s') % short(n)
1019 1019 return (msg, n, rejects)
1020 1020 finally:
1021 1021 os.unlink(tmpname)
1022 1022
1023 1023 # facility to let extensions include additional data in an exported patch
1024 1024 # list of identifiers to be executed in order
1025 1025 extraexport = []
1026 1026 # mapping from identifier to actual export function
1027 1027 # function as to return a string to be added to the header or None
1028 1028 # it is given two arguments (sequencenumber, changectx)
1029 1029 extraexportmap = {}
1030 1030
1031 1031 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1032 1032 opts=None, match=None):
1033 1033 '''export changesets as hg patches.'''
1034 1034
1035 1035 total = len(revs)
1036 1036 revwidth = max([len(str(rev)) for rev in revs])
1037 1037 filemode = {}
1038 1038
1039 1039 def single(rev, seqno, fp):
1040 1040 ctx = repo[rev]
1041 1041 node = ctx.node()
1042 1042 parents = [p.node() for p in ctx.parents() if p]
1043 1043 branch = ctx.branch()
1044 1044 if switch_parent:
1045 1045 parents.reverse()
1046 1046
1047 1047 if parents:
1048 1048 prev = parents[0]
1049 1049 else:
1050 1050 prev = nullid
1051 1051
1052 1052 shouldclose = False
1053 1053 if not fp and len(template) > 0:
1054 1054 desc_lines = ctx.description().rstrip().split('\n')
1055 1055 desc = desc_lines[0] #Commit always has a first line.
1056 1056 fp = makefileobj(repo, template, node, desc=desc, total=total,
1057 1057 seqno=seqno, revwidth=revwidth, mode='wb',
1058 1058 modemap=filemode)
1059 1059 if fp != template:
1060 1060 shouldclose = True
1061 1061 if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
1062 1062 repo.ui.note("%s\n" % fp.name)
1063 1063
1064 1064 if not fp:
1065 1065 write = repo.ui.write
1066 1066 else:
1067 1067 def write(s, **kw):
1068 1068 fp.write(s)
1069 1069
1070 1070 write("# HG changeset patch\n")
1071 1071 write("# User %s\n" % ctx.user())
1072 1072 write("# Date %d %d\n" % ctx.date())
1073 1073 write("# %s\n" % util.datestr(ctx.date()))
1074 1074 if branch and branch != 'default':
1075 1075 write("# Branch %s\n" % branch)
1076 1076 write("# Node ID %s\n" % hex(node))
1077 1077 write("# Parent %s\n" % hex(prev))
1078 1078 if len(parents) > 1:
1079 1079 write("# Parent %s\n" % hex(parents[1]))
1080 1080
1081 1081 for headerid in extraexport:
1082 1082 header = extraexportmap[headerid](seqno, ctx)
1083 1083 if header is not None:
1084 1084 write('# %s\n' % header)
1085 1085 write(ctx.description().rstrip())
1086 1086 write("\n\n")
1087 1087
1088 1088 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1089 1089 write(chunk, label=label)
1090 1090
1091 1091 if shouldclose:
1092 1092 fp.close()
1093 1093
1094 1094 for seqno, rev in enumerate(revs):
1095 1095 single(rev, seqno + 1, fp)
1096 1096
1097 1097 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1098 1098 changes=None, stat=False, fp=None, prefix='',
1099 1099 root='', listsubrepos=False):
1100 1100 '''show diff or diffstat.'''
1101 1101 if fp is None:
1102 1102 write = ui.write
1103 1103 else:
1104 1104 def write(s, **kw):
1105 1105 fp.write(s)
1106 1106
1107 1107 if root:
1108 1108 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1109 1109 else:
1110 1110 relroot = ''
1111 1111 if relroot != '':
1112 1112 # XXX relative roots currently don't work if the root is within a
1113 1113 # subrepo
1114 1114 uirelroot = match.uipath(relroot)
1115 1115 relroot += '/'
1116 1116 for matchroot in match.files():
1117 1117 if not matchroot.startswith(relroot):
1118 1118 ui.warn(_('warning: %s not inside relative root %s\n') % (
1119 1119 match.uipath(matchroot), uirelroot))
1120 1120
1121 1121 if stat:
1122 1122 diffopts = diffopts.copy(context=0)
1123 1123 width = 80
1124 1124 if not ui.plain():
1125 1125 width = ui.termwidth()
1126 1126 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1127 1127 prefix=prefix, relroot=relroot)
1128 1128 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1129 1129 width=width,
1130 1130 git=diffopts.git):
1131 1131 write(chunk, label=label)
1132 1132 else:
1133 1133 for chunk, label in patch.diffui(repo, node1, node2, match,
1134 1134 changes, diffopts, prefix=prefix,
1135 1135 relroot=relroot):
1136 1136 write(chunk, label=label)
1137 1137
1138 1138 if listsubrepos:
1139 1139 ctx1 = repo[node1]
1140 1140 ctx2 = repo[node2]
1141 1141 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1142 1142 tempnode2 = node2
1143 1143 try:
1144 1144 if node2 is not None:
1145 1145 tempnode2 = ctx2.substate[subpath][1]
1146 1146 except KeyError:
1147 1147 # A subrepo that existed in node1 was deleted between node1 and
1148 1148 # node2 (inclusive). Thus, ctx2's substate won't contain that
1149 1149 # subpath. The best we can do is to ignore it.
1150 1150 tempnode2 = None
1151 1151 submatch = matchmod.narrowmatcher(subpath, match)
1152 1152 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1153 1153 stat=stat, fp=fp, prefix=prefix)
1154 1154
1155 1155 class changeset_printer(object):
1156 1156 '''show changeset information when templating not requested.'''
1157 1157
1158 1158 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1159 1159 self.ui = ui
1160 1160 self.repo = repo
1161 1161 self.buffered = buffered
1162 1162 self.matchfn = matchfn
1163 1163 self.diffopts = diffopts
1164 1164 self.header = {}
1165 1165 self.hunk = {}
1166 1166 self.lastheader = None
1167 1167 self.footer = None
1168 1168
1169 1169 def flush(self, ctx):
1170 1170 rev = ctx.rev()
1171 1171 if rev in self.header:
1172 1172 h = self.header[rev]
1173 1173 if h != self.lastheader:
1174 1174 self.lastheader = h
1175 1175 self.ui.write(h)
1176 1176 del self.header[rev]
1177 1177 if rev in self.hunk:
1178 1178 self.ui.write(self.hunk[rev])
1179 1179 del self.hunk[rev]
1180 1180 return 1
1181 1181 return 0
1182 1182
1183 1183 def close(self):
1184 1184 if self.footer:
1185 1185 self.ui.write(self.footer)
1186 1186
1187 1187 def show(self, ctx, copies=None, matchfn=None, **props):
1188 1188 if self.buffered:
1189 1189 self.ui.pushbuffer()
1190 1190 self._show(ctx, copies, matchfn, props)
1191 1191 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
1192 1192 else:
1193 1193 self._show(ctx, copies, matchfn, props)
1194 1194
1195 1195 def _show(self, ctx, copies, matchfn, props):
1196 1196 '''show a single changeset or file revision'''
1197 1197 changenode = ctx.node()
1198 1198 rev = ctx.rev()
1199 1199 if self.ui.debugflag:
1200 1200 hexfunc = hex
1201 1201 else:
1202 1202 hexfunc = short
1203 1203 # as of now, wctx.node() and wctx.rev() return None, but we want to
1204 1204 # show the same values as {node} and {rev} templatekw
1205 1205 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1206 1206
1207 1207 if self.ui.quiet:
1208 1208 self.ui.write("%d:%s\n" % revnode, label='log.node')
1209 1209 return
1210 1210
1211 1211 date = util.datestr(ctx.date())
1212 1212
1213 1213 # i18n: column positioning for "hg log"
1214 1214 self.ui.write(_("changeset: %d:%s\n") % revnode,
1215 1215 label='log.changeset changeset.%s' % ctx.phasestr())
1216 1216
1217 1217 # branches are shown first before any other names due to backwards
1218 1218 # compatibility
1219 1219 branch = ctx.branch()
1220 1220 # don't show the default branch name
1221 1221 if branch != 'default':
1222 1222 # i18n: column positioning for "hg log"
1223 1223 self.ui.write(_("branch: %s\n") % branch,
1224 1224 label='log.branch')
1225 1225
1226 1226 for name, ns in self.repo.names.iteritems():
1227 1227 # branches has special logic already handled above, so here we just
1228 1228 # skip it
1229 1229 if name == 'branches':
1230 1230 continue
1231 1231 # we will use the templatename as the color name since those two
1232 1232 # should be the same
1233 1233 for name in ns.names(self.repo, changenode):
1234 1234 self.ui.write(ns.logfmt % name,
1235 1235 label='log.%s' % ns.colorname)
1236 1236 if self.ui.debugflag:
1237 1237 # i18n: column positioning for "hg log"
1238 1238 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1239 1239 label='log.phase')
1240 1240 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1241 1241 label = 'log.parent changeset.%s' % pctx.phasestr()
1242 1242 # i18n: column positioning for "hg log"
1243 1243 self.ui.write(_("parent: %d:%s\n")
1244 1244 % (pctx.rev(), hexfunc(pctx.node())),
1245 1245 label=label)
1246 1246
1247 1247 if self.ui.debugflag and rev is not None:
1248 1248 mnode = ctx.manifestnode()
1249 1249 # i18n: column positioning for "hg log"
1250 1250 self.ui.write(_("manifest: %d:%s\n") %
1251 1251 (self.repo.manifest.rev(mnode), hex(mnode)),
1252 1252 label='ui.debug log.manifest')
1253 1253 # i18n: column positioning for "hg log"
1254 1254 self.ui.write(_("user: %s\n") % ctx.user(),
1255 1255 label='log.user')
1256 1256 # i18n: column positioning for "hg log"
1257 1257 self.ui.write(_("date: %s\n") % date,
1258 1258 label='log.date')
1259 1259
1260 1260 if self.ui.debugflag:
1261 1261 files = ctx.p1().status(ctx)[:3]
1262 1262 for key, value in zip([# i18n: column positioning for "hg log"
1263 1263 _("files:"),
1264 1264 # i18n: column positioning for "hg log"
1265 1265 _("files+:"),
1266 1266 # i18n: column positioning for "hg log"
1267 1267 _("files-:")], files):
1268 1268 if value:
1269 1269 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1270 1270 label='ui.debug log.files')
1271 1271 elif ctx.files() and self.ui.verbose:
1272 1272 # i18n: column positioning for "hg log"
1273 1273 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1274 1274 label='ui.note log.files')
1275 1275 if copies and self.ui.verbose:
1276 1276 copies = ['%s (%s)' % c for c in copies]
1277 1277 # i18n: column positioning for "hg log"
1278 1278 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1279 1279 label='ui.note log.copies')
1280 1280
1281 1281 extra = ctx.extra()
1282 1282 if extra and self.ui.debugflag:
1283 1283 for key, value in sorted(extra.items()):
1284 1284 # i18n: column positioning for "hg log"
1285 1285 self.ui.write(_("extra: %s=%s\n")
1286 1286 % (key, value.encode('string_escape')),
1287 1287 label='ui.debug log.extra')
1288 1288
1289 1289 description = ctx.description().strip()
1290 1290 if description:
1291 1291 if self.ui.verbose:
1292 1292 self.ui.write(_("description:\n"),
1293 1293 label='ui.note log.description')
1294 1294 self.ui.write(description,
1295 1295 label='ui.note log.description')
1296 1296 self.ui.write("\n\n")
1297 1297 else:
1298 1298 # i18n: column positioning for "hg log"
1299 1299 self.ui.write(_("summary: %s\n") %
1300 1300 description.splitlines()[0],
1301 1301 label='log.summary')
1302 1302 self.ui.write("\n")
1303 1303
1304 self.showpatch(changenode, matchfn)
1305
1306 def showpatch(self, node, matchfn):
1304 self.showpatch(ctx, matchfn)
1305
1306 def showpatch(self, ctx, matchfn):
1307 1307 if not matchfn:
1308 1308 matchfn = self.matchfn
1309 1309 if matchfn:
1310 1310 stat = self.diffopts.get('stat')
1311 1311 diff = self.diffopts.get('patch')
1312 1312 diffopts = patch.diffallopts(self.ui, self.diffopts)
1313 prev = self.repo.changelog.parents(node)[0]
1313 node = ctx.node()
1314 prev = ctx.p1()
1314 1315 if stat:
1315 1316 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1316 1317 match=matchfn, stat=True)
1317 1318 if diff:
1318 1319 if stat:
1319 1320 self.ui.write("\n")
1320 1321 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1321 1322 match=matchfn, stat=False)
1322 1323 self.ui.write("\n")
1323 1324
1324 1325 class jsonchangeset(changeset_printer):
1325 1326 '''format changeset information.'''
1326 1327
1327 1328 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1328 1329 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1329 1330 self.cache = {}
1330 1331 self._first = True
1331 1332
1332 1333 def close(self):
1333 1334 if not self._first:
1334 1335 self.ui.write("\n]\n")
1335 1336 else:
1336 1337 self.ui.write("[]\n")
1337 1338
1338 1339 def _show(self, ctx, copies, matchfn, props):
1339 1340 '''show a single changeset or file revision'''
1340 1341 rev = ctx.rev()
1341 1342 if rev is None:
1342 1343 jrev = jnode = 'null'
1343 1344 else:
1344 1345 jrev = str(rev)
1345 1346 jnode = '"%s"' % hex(ctx.node())
1346 1347 j = encoding.jsonescape
1347 1348
1348 1349 if self._first:
1349 1350 self.ui.write("[\n {")
1350 1351 self._first = False
1351 1352 else:
1352 1353 self.ui.write(",\n {")
1353 1354
1354 1355 if self.ui.quiet:
1355 1356 self.ui.write('\n "rev": %s' % jrev)
1356 1357 self.ui.write(',\n "node": %s' % jnode)
1357 1358 self.ui.write('\n }')
1358 1359 return
1359 1360
1360 1361 self.ui.write('\n "rev": %s' % jrev)
1361 1362 self.ui.write(',\n "node": %s' % jnode)
1362 1363 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1363 1364 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1364 1365 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1365 1366 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1366 1367 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1367 1368
1368 1369 self.ui.write(',\n "bookmarks": [%s]' %
1369 1370 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1370 1371 self.ui.write(',\n "tags": [%s]' %
1371 1372 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1372 1373 self.ui.write(',\n "parents": [%s]' %
1373 1374 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1374 1375
1375 1376 if self.ui.debugflag:
1376 1377 if rev is None:
1377 1378 jmanifestnode = 'null'
1378 1379 else:
1379 1380 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1380 1381 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1381 1382
1382 1383 self.ui.write(',\n "extra": {%s}' %
1383 1384 ", ".join('"%s": "%s"' % (j(k), j(v))
1384 1385 for k, v in ctx.extra().items()))
1385 1386
1386 1387 files = ctx.p1().status(ctx)
1387 1388 self.ui.write(',\n "modified": [%s]' %
1388 1389 ", ".join('"%s"' % j(f) for f in files[0]))
1389 1390 self.ui.write(',\n "added": [%s]' %
1390 1391 ", ".join('"%s"' % j(f) for f in files[1]))
1391 1392 self.ui.write(',\n "removed": [%s]' %
1392 1393 ", ".join('"%s"' % j(f) for f in files[2]))
1393 1394
1394 1395 elif self.ui.verbose:
1395 1396 self.ui.write(',\n "files": [%s]' %
1396 1397 ", ".join('"%s"' % j(f) for f in ctx.files()))
1397 1398
1398 1399 if copies:
1399 1400 self.ui.write(',\n "copies": {%s}' %
1400 1401 ", ".join('"%s": "%s"' % (j(k), j(v))
1401 1402 for k, v in copies))
1402 1403
1403 1404 matchfn = self.matchfn
1404 1405 if matchfn:
1405 1406 stat = self.diffopts.get('stat')
1406 1407 diff = self.diffopts.get('patch')
1407 1408 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1408 1409 node, prev = ctx.node(), ctx.p1().node()
1409 1410 if stat:
1410 1411 self.ui.pushbuffer()
1411 1412 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1412 1413 match=matchfn, stat=True)
1413 1414 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1414 1415 if diff:
1415 1416 self.ui.pushbuffer()
1416 1417 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1417 1418 match=matchfn, stat=False)
1418 1419 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1419 1420
1420 1421 self.ui.write("\n }")
1421 1422
1422 1423 class changeset_templater(changeset_printer):
1423 1424 '''format changeset information.'''
1424 1425
1425 1426 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1426 1427 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1427 1428 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1428 1429 defaulttempl = {
1429 1430 'parent': '{rev}:{node|formatnode} ',
1430 1431 'manifest': '{rev}:{node|formatnode}',
1431 1432 'file_copy': '{name} ({source})',
1432 1433 'extra': '{key}={value|stringescape}'
1433 1434 }
1434 1435 # filecopy is preserved for compatibility reasons
1435 1436 defaulttempl['filecopy'] = defaulttempl['file_copy']
1436 1437 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1437 1438 cache=defaulttempl)
1438 1439 if tmpl:
1439 1440 self.t.cache['changeset'] = tmpl
1440 1441
1441 1442 self.cache = {}
1442 1443
1443 1444 # find correct templates for current mode
1444 1445 tmplmodes = [
1445 1446 (True, None),
1446 1447 (self.ui.verbose, 'verbose'),
1447 1448 (self.ui.quiet, 'quiet'),
1448 1449 (self.ui.debugflag, 'debug'),
1449 1450 ]
1450 1451
1451 1452 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1452 1453 'docheader': '', 'docfooter': ''}
1453 1454 for mode, postfix in tmplmodes:
1454 1455 for t in self._parts:
1455 1456 cur = t
1456 1457 if postfix:
1457 1458 cur += "_" + postfix
1458 1459 if mode and cur in self.t:
1459 1460 self._parts[t] = cur
1460 1461
1461 1462 if self._parts['docheader']:
1462 1463 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1463 1464
1464 1465 def close(self):
1465 1466 if self._parts['docfooter']:
1466 1467 if not self.footer:
1467 1468 self.footer = ""
1468 1469 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1469 1470 return super(changeset_templater, self).close()
1470 1471
1471 1472 def _show(self, ctx, copies, matchfn, props):
1472 1473 '''show a single changeset or file revision'''
1473 1474 props = props.copy()
1474 1475 props.update(templatekw.keywords)
1475 1476 props['templ'] = self.t
1476 1477 props['ctx'] = ctx
1477 1478 props['repo'] = self.repo
1478 1479 props['revcache'] = {'copies': copies}
1479 1480 props['cache'] = self.cache
1480 1481
1481 1482 try:
1482 1483 # write header
1483 1484 if self._parts['header']:
1484 1485 h = templater.stringify(self.t(self._parts['header'], **props))
1485 1486 if self.buffered:
1486 1487 self.header[ctx.rev()] = h
1487 1488 else:
1488 1489 if self.lastheader != h:
1489 1490 self.lastheader = h
1490 1491 self.ui.write(h)
1491 1492
1492 1493 # write changeset metadata, then patch if requested
1493 1494 key = self._parts['changeset']
1494 1495 self.ui.write(templater.stringify(self.t(key, **props)))
1495 self.showpatch(ctx.node(), matchfn)
1496 self.showpatch(ctx, matchfn)
1496 1497
1497 1498 if self._parts['footer']:
1498 1499 if not self.footer:
1499 1500 self.footer = templater.stringify(
1500 1501 self.t(self._parts['footer'], **props))
1501 1502 except KeyError as inst:
1502 1503 msg = _("%s: no key named '%s'")
1503 1504 raise error.Abort(msg % (self.t.mapfile, inst.args[0]))
1504 1505 except SyntaxError as inst:
1505 1506 raise error.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1506 1507
1507 1508 def gettemplate(ui, tmpl, style):
1508 1509 """
1509 1510 Find the template matching the given template spec or style.
1510 1511 """
1511 1512
1512 1513 # ui settings
1513 1514 if not tmpl and not style: # template are stronger than style
1514 1515 tmpl = ui.config('ui', 'logtemplate')
1515 1516 if tmpl:
1516 1517 try:
1517 1518 tmpl = templater.unquotestring(tmpl)
1518 1519 except SyntaxError:
1519 1520 pass
1520 1521 return tmpl, None
1521 1522 else:
1522 1523 style = util.expandpath(ui.config('ui', 'style', ''))
1523 1524
1524 1525 if not tmpl and style:
1525 1526 mapfile = style
1526 1527 if not os.path.split(mapfile)[0]:
1527 1528 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1528 1529 or templater.templatepath(mapfile))
1529 1530 if mapname:
1530 1531 mapfile = mapname
1531 1532 return None, mapfile
1532 1533
1533 1534 if not tmpl:
1534 1535 return None, None
1535 1536
1536 1537 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1537 1538
1538 1539 def show_changeset(ui, repo, opts, buffered=False):
1539 1540 """show one changeset using template or regular display.
1540 1541
1541 1542 Display format will be the first non-empty hit of:
1542 1543 1. option 'template'
1543 1544 2. option 'style'
1544 1545 3. [ui] setting 'logtemplate'
1545 1546 4. [ui] setting 'style'
1546 1547 If all of these values are either the unset or the empty string,
1547 1548 regular display via changeset_printer() is done.
1548 1549 """
1549 1550 # options
1550 1551 matchfn = None
1551 1552 if opts.get('patch') or opts.get('stat'):
1552 1553 matchfn = scmutil.matchall(repo)
1553 1554
1554 1555 if opts.get('template') == 'json':
1555 1556 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1556 1557
1557 1558 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1558 1559
1559 1560 if not tmpl and not mapfile:
1560 1561 return changeset_printer(ui, repo, matchfn, opts, buffered)
1561 1562
1562 1563 try:
1563 1564 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1564 1565 buffered)
1565 1566 except SyntaxError as inst:
1566 1567 raise error.Abort(inst.args[0])
1567 1568 return t
1568 1569
1569 1570 def showmarker(ui, marker):
1570 1571 """utility function to display obsolescence marker in a readable way
1571 1572
1572 1573 To be used by debug function."""
1573 1574 ui.write(hex(marker.precnode()))
1574 1575 for repl in marker.succnodes():
1575 1576 ui.write(' ')
1576 1577 ui.write(hex(repl))
1577 1578 ui.write(' %X ' % marker.flags())
1578 1579 parents = marker.parentnodes()
1579 1580 if parents is not None:
1580 1581 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1581 1582 ui.write('(%s) ' % util.datestr(marker.date()))
1582 1583 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1583 1584 sorted(marker.metadata().items())
1584 1585 if t[0] != 'date')))
1585 1586 ui.write('\n')
1586 1587
1587 1588 def finddate(ui, repo, date):
1588 1589 """Find the tipmost changeset that matches the given date spec"""
1589 1590
1590 1591 df = util.matchdate(date)
1591 1592 m = scmutil.matchall(repo)
1592 1593 results = {}
1593 1594
1594 1595 def prep(ctx, fns):
1595 1596 d = ctx.date()
1596 1597 if df(d[0]):
1597 1598 results[ctx.rev()] = d
1598 1599
1599 1600 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1600 1601 rev = ctx.rev()
1601 1602 if rev in results:
1602 1603 ui.status(_("found revision %s from %s\n") %
1603 1604 (rev, util.datestr(results[rev])))
1604 1605 return str(rev)
1605 1606
1606 1607 raise error.Abort(_("revision matching date not found"))
1607 1608
1608 1609 def increasingwindows(windowsize=8, sizelimit=512):
1609 1610 while True:
1610 1611 yield windowsize
1611 1612 if windowsize < sizelimit:
1612 1613 windowsize *= 2
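# Illustrative only: with the defaults above, the generator yields
# 8, 16, 32, 64, 128, 256, 512 and then 512 forever, since the window
# stops doubling once it reaches sizelimit.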
1613 1614
1614 1615 class FileWalkError(Exception):
1615 1616 pass
1616 1617
1617 1618 def walkfilerevs(repo, match, follow, revs, fncache):
1618 1619 '''Walks the file history for the matched files.
1619 1620
1620 1621 Returns the changeset revs that are involved in the file history.
1621 1622
1622 1623 Throws FileWalkError if the file history can't be walked using
1623 1624 filelogs alone.
1624 1625 '''
1625 1626 wanted = set()
1626 1627 copies = []
1627 1628 minrev, maxrev = min(revs), max(revs)
1628 1629 def filerevgen(filelog, last):
1629 1630 """
1630 1631 Only files, no patterns. Check the history of each file.
1631 1632
1632 1633 Examines filelog entries within the minrev..maxrev linkrev range.
1633 1634 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1634 1635 tuples in reverse order.
1635 1636 """
1636 1637 cl_count = len(repo)
1637 1638 revs = []
1638 1639 for j in xrange(0, last + 1):
1639 1640 linkrev = filelog.linkrev(j)
1640 1641 if linkrev < minrev:
1641 1642 continue
1642 1643 # only yield revs for which we have the changelog; missing ones can
1643 1644 # happen while doing "hg log" during a pull or commit
1644 1645 if linkrev >= cl_count:
1645 1646 break
1646 1647
1647 1648 parentlinkrevs = []
1648 1649 for p in filelog.parentrevs(j):
1649 1650 if p != nullrev:
1650 1651 parentlinkrevs.append(filelog.linkrev(p))
1651 1652 n = filelog.node(j)
1652 1653 revs.append((linkrev, parentlinkrevs,
1653 1654 follow and filelog.renamed(n)))
1654 1655
1655 1656 return reversed(revs)
1656 1657 def iterfiles():
1657 1658 pctx = repo['.']
1658 1659 for filename in match.files():
1659 1660 if follow:
1660 1661 if filename not in pctx:
1661 1662 raise error.Abort(_('cannot follow file not in parent '
1662 1663 'revision: "%s"') % filename)
1663 1664 yield filename, pctx[filename].filenode()
1664 1665 else:
1665 1666 yield filename, None
1666 1667 for filename_node in copies:
1667 1668 yield filename_node
1668 1669
1669 1670 for file_, node in iterfiles():
1670 1671 filelog = repo.file(file_)
1671 1672 if not len(filelog):
1672 1673 if node is None:
1673 1674 # A zero count may be a directory or deleted file, so
1674 1675 # try to find matching entries on the slow path.
1675 1676 if follow:
1676 1677 raise error.Abort(
1677 1678 _('cannot follow nonexistent file: "%s"') % file_)
1678 1679 raise FileWalkError("Cannot walk via filelog")
1679 1680 else:
1680 1681 continue
1681 1682
1682 1683 if node is None:
1683 1684 last = len(filelog) - 1
1684 1685 else:
1685 1686 last = filelog.rev(node)
1686 1687
1687 1688 # keep track of all ancestors of the file
1688 1689 ancestors = set([filelog.linkrev(last)])
1689 1690
1690 1691 # iterate from latest to oldest revision
1691 1692 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1692 1693 if not follow:
1693 1694 if rev > maxrev:
1694 1695 continue
1695 1696 else:
1696 1697 # Note that last might not be the first interesting
1697 1698 # rev to us:
1698 1699 # if the file has been changed after maxrev, we'll
1699 1700 # have linkrev(last) > maxrev, and we still need
1700 1701 # to explore the file graph
1701 1702 if rev not in ancestors:
1702 1703 continue
1703 1704 # XXX insert 1327 fix here
1704 1705 if flparentlinkrevs:
1705 1706 ancestors.update(flparentlinkrevs)
1706 1707
1707 1708 fncache.setdefault(rev, []).append(file_)
1708 1709 wanted.add(rev)
1709 1710 if copied:
1710 1711 copies.append(copied)
1711 1712
1712 1713 return wanted
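# Illustrative note (not part of the original module): after
# walkfilerevs() returns, `wanted` holds the linkrevs that touch the
# matched files and `fncache` maps each such rev to the file names
# involved, so walkchangerevs() below can skip re-reading ctx.files()
# for those revisions.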
1713 1714
1714 1715 class _followfilter(object):
1715 1716 def __init__(self, repo, onlyfirst=False):
1716 1717 self.repo = repo
1717 1718 self.startrev = nullrev
1718 1719 self.roots = set()
1719 1720 self.onlyfirst = onlyfirst
1720 1721
1721 1722 def match(self, rev):
1722 1723 def realparents(rev):
1723 1724 if self.onlyfirst:
1724 1725 return self.repo.changelog.parentrevs(rev)[0:1]
1725 1726 else:
1726 1727 return filter(lambda x: x != nullrev,
1727 1728 self.repo.changelog.parentrevs(rev))
1728 1729
1729 1730 if self.startrev == nullrev:
1730 1731 self.startrev = rev
1731 1732 return True
1732 1733
1733 1734 if rev > self.startrev:
1734 1735 # forward: all descendants
1735 1736 if not self.roots:
1736 1737 self.roots.add(self.startrev)
1737 1738 for parent in realparents(rev):
1738 1739 if parent in self.roots:
1739 1740 self.roots.add(rev)
1740 1741 return True
1741 1742 else:
1742 1743 # backwards: all parents
1743 1744 if not self.roots:
1744 1745 self.roots.update(realparents(self.startrev))
1745 1746 if rev in self.roots:
1746 1747 self.roots.remove(rev)
1747 1748 self.roots.update(realparents(rev))
1748 1749 return True
1749 1750
1750 1751 return False
1751 1752
1752 1753 def walkchangerevs(repo, match, opts, prepare):
1753 1754 '''Iterate over files and the revs in which they changed.
1754 1755
1755 1756 Callers most commonly need to iterate backwards over the history
1756 1757 in which they are interested. Doing so has awful (quadratic-looking)
1757 1758 performance, so we use iterators in a "windowed" way.
1758 1759
1759 1760 We walk a window of revisions in the desired order. Within the
1760 1761 window, we first walk forwards to gather data, then in the desired
1761 1762 order (usually backwards) to display it.
1762 1763
1763 1764 This function returns an iterator yielding contexts. Before
1764 1765 yielding each context, the iterator will first call the prepare
1765 1766 function on each context in the window in forward order.'''
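# Illustrative caller sketch, modelled on finddate() above:
#
#   def prep(ctx, fns):
#       ...record per-revision data from ctx and the matched fns...
#
#   for ctx in walkchangerevs(repo, match, {'rev': None}, prep):
#       ...display ctx, knowing prep() has already run for it...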
1766 1767
1767 1768 follow = opts.get('follow') or opts.get('follow_first')
1768 1769 revs = _logrevs(repo, opts)
1769 1770 if not revs:
1770 1771 return []
1771 1772 wanted = set()
1772 1773 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1773 1774 opts.get('removed'))
1774 1775 fncache = {}
1775 1776 change = repo.changectx
1776 1777
1777 1778 # First step is to fill wanted, the set of revisions that we want to yield.
1778 1779 # When it does not induce extra cost, we also fill fncache for revisions in
1779 1780 # wanted: a cache of filenames that were changed (ctx.files()) and that
1780 1781 # match the file filtering conditions.
1781 1782
1782 1783 if match.always():
1783 1784 # No files, no patterns. Display all revs.
1784 1785 wanted = revs
1785 1786 elif not slowpath:
1786 1787 # We only have to read through the filelog to find wanted revisions
1787 1788
1788 1789 try:
1789 1790 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1790 1791 except FileWalkError:
1791 1792 slowpath = True
1792 1793
1793 1794 # We decided to fall back to the slowpath because at least one
1794 1795 # of the paths was not a file. Check to see if at least one of them
1795 1796 # existed in history, otherwise simply return
1796 1797 for path in match.files():
1797 1798 if path == '.' or path in repo.store:
1798 1799 break
1799 1800 else:
1800 1801 return []
1801 1802
1802 1803 if slowpath:
1803 1804 # We have to read the changelog to match filenames against
1804 1805 # changed files
1805 1806
1806 1807 if follow:
1807 1808 raise error.Abort(_('can only follow copies/renames for explicit '
1808 1809 'filenames'))
1809 1810
1810 1811 # The slow path checks files modified in every changeset.
1811 1812 # This is really slow on large repos, so compute the set lazily.
1812 1813 class lazywantedset(object):
1813 1814 def __init__(self):
1814 1815 self.set = set()
1815 1816 self.revs = set(revs)
1816 1817
1817 1818 # No need to worry about locality here because it will be accessed
1818 1819 # in the same order as the increasing window below.
1819 1820 def __contains__(self, value):
1820 1821 if value in self.set:
1821 1822 return True
1822 1823 elif not value in self.revs:
1823 1824 return False
1824 1825 else:
1825 1826 self.revs.discard(value)
1826 1827 ctx = change(value)
1827 1828 matches = filter(match, ctx.files())
1828 1829 if matches:
1829 1830 fncache[value] = matches
1830 1831 self.set.add(value)
1831 1832 return True
1832 1833 return False
1833 1834
1834 1835 def discard(self, value):
1835 1836 self.revs.discard(value)
1836 1837 self.set.discard(value)
1837 1838
1838 1839 wanted = lazywantedset()
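# Illustrative note: membership tests such as `rev in wanted` now read
# ctx.files() lazily for that rev, populate fncache as a side effect, and
# memoize positive answers in self.set; revisions outside the requested
# range are rejected immediately.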
1839 1840
1840 1841 # it might be worthwhile to do this in the iterator if the rev range
1841 1842 # is descending and the prune args are all within that range
1842 1843 for rev in opts.get('prune', ()):
1843 1844 rev = repo[rev].rev()
1844 1845 ff = _followfilter(repo)
1845 1846 stop = min(revs[0], revs[-1])
1846 1847 for x in xrange(rev, stop - 1, -1):
1847 1848 if ff.match(x):
1848 1849 wanted = wanted - [x]
1849 1850
1850 1851 # Now that wanted is correctly initialized, we can iterate over the
1851 1852 # revision range, yielding only revisions in wanted.
1852 1853 def iterate():
1853 1854 if follow and match.always():
1854 1855 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1855 1856 def want(rev):
1856 1857 return ff.match(rev) and rev in wanted
1857 1858 else:
1858 1859 def want(rev):
1859 1860 return rev in wanted
1860 1861
1861 1862 it = iter(revs)
1862 1863 stopiteration = False
1863 1864 for windowsize in increasingwindows():
1864 1865 nrevs = []
1865 1866 for i in xrange(windowsize):
1866 1867 rev = next(it, None)
1867 1868 if rev is None:
1868 1869 stopiteration = True
1869 1870 break
1870 1871 elif want(rev):
1871 1872 nrevs.append(rev)
1872 1873 for rev in sorted(nrevs):
1873 1874 fns = fncache.get(rev)
1874 1875 ctx = change(rev)
1875 1876 if not fns:
1876 1877 def fns_generator():
1877 1878 for f in ctx.files():
1878 1879 if match(f):
1879 1880 yield f
1880 1881 fns = fns_generator()
1881 1882 prepare(ctx, fns)
1882 1883 for rev in nrevs:
1883 1884 yield change(rev)
1884 1885
1885 1886 if stopiteration:
1886 1887 break
1887 1888
1888 1889 return iterate()
1889 1890
1890 1891 def _makefollowlogfilematcher(repo, files, followfirst):
1891 1892 # When displaying a revision with --patch --follow FILE, we have
1892 1893 # to know which file of the revision must be diffed. With
1893 1894 # --follow, we want the names of the ancestors of FILE in the
1894 1895 # revision, stored in "fcache". "fcache" is populated by
1895 1896 # reproducing the graph traversal already done by --follow revset
1896 1897 # and relating linkrevs to file names (which is not "correct" but
1897 1898 # good enough).
1898 1899 fcache = {}
1899 1900 fcacheready = [False]
1900 1901 pctx = repo['.']
1901 1902
1902 1903 def populate():
1903 1904 for fn in files:
1904 1905 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1905 1906 for c in i:
1906 1907 fcache.setdefault(c.linkrev(), set()).add(c.path())
1907 1908
1908 1909 def filematcher(rev):
1909 1910 if not fcacheready[0]:
1910 1911 # Lazy initialization
1911 1912 fcacheready[0] = True
1912 1913 populate()
1913 1914 return scmutil.matchfiles(repo, fcache.get(rev, []))
1914 1915
1915 1916 return filematcher
1916 1917
1917 1918 def _makenofollowlogfilematcher(repo, pats, opts):
1918 1919 '''hook for extensions to override the filematcher for non-follow cases'''
1919 1920 return None
1920 1921
1921 1922 def _makelogrevset(repo, pats, opts, revs):
1922 1923 """Return (expr, filematcher) where expr is a revset string built
1923 1924 from log options and file patterns or None. If --stat or --patch
1924 1925 are not passed filematcher is None. Otherwise it is a callable
1925 1926 taking a revision number and returning a match object filtering
1926 1927 the files to be detailed when displaying the revision.
1927 1928 """
1928 1929 opt2revset = {
1929 1930 'no_merges': ('not merge()', None),
1930 1931 'only_merges': ('merge()', None),
1931 1932 '_ancestors': ('ancestors(%(val)s)', None),
1932 1933 '_fancestors': ('_firstancestors(%(val)s)', None),
1933 1934 '_descendants': ('descendants(%(val)s)', None),
1934 1935 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1935 1936 '_matchfiles': ('_matchfiles(%(val)s)', None),
1936 1937 'date': ('date(%(val)r)', None),
1937 1938 'branch': ('branch(%(val)r)', ' or '),
1938 1939 '_patslog': ('filelog(%(val)r)', ' or '),
1939 1940 '_patsfollow': ('follow(%(val)r)', ' or '),
1940 1941 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1941 1942 'keyword': ('keyword(%(val)r)', ' or '),
1942 1943 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1943 1944 'user': ('user(%(val)r)', ' or '),
1944 1945 }
1945 1946
1946 1947 opts = dict(opts)
1947 1948 # follow or not follow?
1948 1949 follow = opts.get('follow') or opts.get('follow_first')
1949 1950 if opts.get('follow_first'):
1950 1951 followfirst = 1
1951 1952 else:
1952 1953 followfirst = 0
1953 1954 # --follow with FILE behavior depends on revs...
1954 1955 it = iter(revs)
1955 1956 startrev = it.next()
1956 1957 followdescendants = startrev < next(it, startrev)
1957 1958
1958 1959 # branch and only_branch are really aliases and must be handled at
1959 1960 # the same time
1960 1961 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1961 1962 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1962 1963 # pats/include/exclude are passed to match.match() directly in
1963 1964 # _matchfiles() revset but walkchangerevs() builds its matcher with
1964 1965 # scmutil.match(). The difference is input pats are globbed on
1965 1966 # platforms without shell expansion (windows).
1966 1967 wctx = repo[None]
1967 1968 match, pats = scmutil.matchandpats(wctx, pats, opts)
1968 1969 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1969 1970 opts.get('removed'))
1970 1971 if not slowpath:
1971 1972 for f in match.files():
1972 1973 if follow and f not in wctx:
1973 1974 # If the file exists, it may be a directory, so let it
1974 1975 # take the slow path.
1975 1976 if os.path.exists(repo.wjoin(f)):
1976 1977 slowpath = True
1977 1978 continue
1978 1979 else:
1979 1980 raise error.Abort(_('cannot follow file not in parent '
1980 1981 'revision: "%s"') % f)
1981 1982 filelog = repo.file(f)
1982 1983 if not filelog:
1983 1984 # A zero count may be a directory or deleted file, so
1984 1985 # try to find matching entries on the slow path.
1985 1986 if follow:
1986 1987 raise error.Abort(
1987 1988 _('cannot follow nonexistent file: "%s"') % f)
1988 1989 slowpath = True
1989 1990
1990 1991 # We decided to fall back to the slowpath because at least one
1991 1992 # of the paths was not a file. Check to see if at least one of them
1992 1993 # existed in history - in that case, we'll continue down the
1993 1994 # slowpath; otherwise, we can turn off the slowpath
1994 1995 if slowpath:
1995 1996 for path in match.files():
1996 1997 if path == '.' or path in repo.store:
1997 1998 break
1998 1999 else:
1999 2000 slowpath = False
2000 2001
2001 2002 fpats = ('_patsfollow', '_patsfollowfirst')
2002 2003 fnopats = (('_ancestors', '_fancestors'),
2003 2004 ('_descendants', '_fdescendants'))
2004 2005 if slowpath:
2005 2006 # See walkchangerevs() slow path.
2006 2007 #
2007 2008 # pats/include/exclude cannot be represented as separate
2008 2009 # revset expressions as their filtering logic applies at file
2009 2010 # level. For instance "-I a -X b" matches a revision touching
2010 2011 # "a" and "b" while "file(a) and not file(b)" does
2011 2012 # not. Besides, filesets are evaluated against the working
2012 2013 # directory.
2013 2014 matchargs = ['r:', 'd:relpath']
2014 2015 for p in pats:
2015 2016 matchargs.append('p:' + p)
2016 2017 for p in opts.get('include', []):
2017 2018 matchargs.append('i:' + p)
2018 2019 for p in opts.get('exclude', []):
2019 2020 matchargs.append('x:' + p)
2020 2021 matchargs = ','.join(('%r' % p) for p in matchargs)
2021 2022 opts['_matchfiles'] = matchargs
2022 2023 if follow:
2023 2024 opts[fnopats[0][followfirst]] = '.'
2024 2025 else:
2025 2026 if follow:
2026 2027 if pats:
2027 2028 # follow() revset interprets its file argument as a
2028 2029 # manifest entry, so use match.files(), not pats.
2029 2030 opts[fpats[followfirst]] = list(match.files())
2030 2031 else:
2031 2032 op = fnopats[followdescendants][followfirst]
2032 2033 opts[op] = 'rev(%d)' % startrev
2033 2034 else:
2034 2035 opts['_patslog'] = list(pats)
2035 2036
2036 2037 filematcher = None
2037 2038 if opts.get('patch') or opts.get('stat'):
2038 2039 # When following files, track renames via a special matcher.
2039 2040 # If we're forced to take the slowpath it means we're following
2040 2041 # at least one pattern/directory, so don't bother with rename tracking.
2041 2042 if follow and not match.always() and not slowpath:
2042 2043 # _makefollowlogfilematcher expects its files argument to be
2043 2044 # relative to the repo root, so use match.files(), not pats.
2044 2045 filematcher = _makefollowlogfilematcher(repo, match.files(),
2045 2046 followfirst)
2046 2047 else:
2047 2048 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2048 2049 if filematcher is None:
2049 2050 filematcher = lambda rev: match
2050 2051
2051 2052 expr = []
2052 2053 for op, val in sorted(opts.iteritems()):
2053 2054 if not val:
2054 2055 continue
2055 2056 if op not in opt2revset:
2056 2057 continue
2057 2058 revop, andor = opt2revset[op]
2058 2059 if '%(val)' not in revop:
2059 2060 expr.append(revop)
2060 2061 else:
2061 2062 if not isinstance(val, list):
2062 2063 e = revop % {'val': val}
2063 2064 else:
2064 2065 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2065 2066 expr.append(e)
2066 2067
2067 2068 if expr:
2068 2069 expr = '(' + ' and '.join(expr) + ')'
2069 2070 else:
2070 2071 expr = None
2071 2072 return expr, filematcher
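# Illustrative only (hypothetical option values): with
# opts={'user': ['alice'], 'keyword': ['bug']} and no file patterns or
# --patch/--stat, the code above builds an expression roughly like
# "((keyword('bug')) and (user('alice')))" and returns filematcher=None.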
2072 2073
2073 2074 def _logrevs(repo, opts):
2074 2075 # Default --rev value depends on --follow but --follow behavior
2075 2076 # depends on revisions resolved from --rev...
2076 2077 follow = opts.get('follow') or opts.get('follow_first')
2077 2078 if opts.get('rev'):
2078 2079 revs = scmutil.revrange(repo, opts['rev'])
2079 2080 elif follow and repo.dirstate.p1() == nullid:
2080 2081 revs = revset.baseset()
2081 2082 elif follow:
2082 2083 revs = repo.revs('reverse(:.)')
2083 2084 else:
2084 2085 revs = revset.spanset(repo)
2085 2086 revs.reverse()
2086 2087 return revs
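# Illustrative note: with no --rev, plain log iterates every revision
# newest first (the reversed spanset); --follow instead starts from the
# working directory parent and walks its ancestors via 'reverse(:.)'.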
2087 2088
2088 2089 def getgraphlogrevs(repo, pats, opts):
2089 2090 """Return (revs, expr, filematcher) where revs is an iterable of
2090 2091 revision numbers, expr is a revset string built from log options
2091 2092 and file patterns or None, and used to filter 'revs'. If --stat or
2092 2093 --patch are not passed filematcher is None. Otherwise it is a
2093 2094 callable taking a revision number and returning a match object
2094 2095 filtering the files to be detailed when displaying the revision.
2095 2096 """
2096 2097 limit = loglimit(opts)
2097 2098 revs = _logrevs(repo, opts)
2098 2099 if not revs:
2099 2100 return revset.baseset(), None, None
2100 2101 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2101 2102 if opts.get('rev'):
2102 2103 # User-specified revs might be unsorted, but don't sort before
2103 2104 # _makelogrevset because it might depend on the order of revs
2104 2105 revs.sort(reverse=True)
2105 2106 if expr:
2106 2107 # Revset matchers often operate faster on revisions in changelog
2107 2108 # order, because most filters deal with the changelog.
2108 2109 revs.reverse()
2109 2110 matcher = revset.match(repo.ui, expr)
2110 2111 # Revset matches can reorder revisions. "A or B" typically
2111 2112 # returns the revision matching A then the revision matching B. Sort
2112 2113 # again to fix that.
2113 2114 revs = matcher(repo, revs)
2114 2115 revs.sort(reverse=True)
2115 2116 if limit is not None:
2116 2117 limitedrevs = []
2117 2118 for idx, rev in enumerate(revs):
2118 2119 if idx >= limit:
2119 2120 break
2120 2121 limitedrevs.append(rev)
2121 2122 revs = revset.baseset(limitedrevs)
2122 2123
2123 2124 return revs, expr, filematcher
2124 2125
2125 2126 def getlogrevs(repo, pats, opts):
2126 2127 """Return (revs, expr, filematcher) where revs is an iterable of
2127 2128 revision numbers, expr is a revset string built from log options
2128 2129 and file patterns or None, and used to filter 'revs'. If --stat or
2129 2130 --patch are not passed filematcher is None. Otherwise it is a
2130 2131 callable taking a revision number and returning a match object
2131 2132 filtering the files to be detailed when displaying the revision.
2132 2133 """
2133 2134 limit = loglimit(opts)
2134 2135 revs = _logrevs(repo, opts)
2135 2136 if not revs:
2136 2137 return revset.baseset([]), None, None
2137 2138 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2138 2139 if expr:
2139 2140 # Revset matchers often operate faster on revisions in changelog
2140 2141 # order, because most filters deal with the changelog.
2141 2142 if not opts.get('rev'):
2142 2143 revs.reverse()
2143 2144 matcher = revset.match(repo.ui, expr)
2144 2145 # Revset matches can reorder revisions. "A or B" typically
2145 2146 # returns the revision matching A then the revision matching B. Sort
2146 2147 # again to fix that.
2147 2148 revs = matcher(repo, revs)
2148 2149 if not opts.get('rev'):
2149 2150 revs.sort(reverse=True)
2150 2151 if limit is not None:
2151 2152 limitedrevs = []
2152 2153 for idx, r in enumerate(revs):
2153 2154 if limit <= idx:
2154 2155 break
2155 2156 limitedrevs.append(r)
2156 2157 revs = revset.baseset(limitedrevs)
2157 2158
2158 2159 return revs, expr, filematcher
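# Illustrative usage sketch (how a log-like command might drive the
# triple returned above; not a verbatim copy of any caller):
#
#   revs, expr, filematcher = getlogrevs(repo, pats, opts)
#   displayer = show_changeset(ui, repo, opts, buffered=True)
#   for rev in revs:
#       ctx = repo[rev]
#       revmatchfn = filematcher(rev) if filematcher else None
#       displayer.show(ctx, matchfn=revmatchfn)
#       displayer.flush(ctx)
#   displayer.close()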
2159 2160
2160 2161 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
2161 2162 filematcher=None):
2162 2163 seen, state = [], graphmod.asciistate()
2163 2164 for rev, type, ctx, parents in dag:
2164 2165 char = 'o'
2165 2166 if ctx.node() in showparents:
2166 2167 char = '@'
2167 2168 elif ctx.obsolete():
2168 2169 char = 'x'
2169 2170 elif ctx.closesbranch():
2170 2171 char = '_'
2171 2172 copies = None
2172 2173 if getrenamed and ctx.rev():
2173 2174 copies = []
2174 2175 for fn in ctx.files():
2175 2176 rename = getrenamed(fn, ctx.rev())
2176 2177 if rename:
2177 2178 copies.append((fn, rename[0]))
2178 2179 revmatchfn = None
2179 2180 if filematcher is not None:
2180 2181 revmatchfn = filematcher(ctx.rev())
2181 2182 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2182 2183 lines = displayer.hunk.pop(rev).split('\n')
2183 2184 if not lines[-1]:
2184 2185 del lines[-1]
2185 2186 displayer.flush(ctx)
2186 2187 edges = edgefn(type, char, lines, seen, rev, parents)
2187 2188 for type, char, lines, coldata in edges:
2188 2189 graphmod.ascii(ui, state, type, char, lines, coldata)
2189 2190 displayer.close()
2190 2191
2191 2192 def graphlog(ui, repo, *pats, **opts):
2192 2193 # Parameters are identical to log command ones
2193 2194 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2194 2195 revdag = graphmod.dagwalker(repo, revs)
2195 2196
2196 2197 getrenamed = None
2197 2198 if opts.get('copies'):
2198 2199 endrev = None
2199 2200 if opts.get('rev'):
2200 2201 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2201 2202 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2202 2203 displayer = show_changeset(ui, repo, opts, buffered=True)
2203 2204 showparents = [ctx.node() for ctx in repo[None].parents()]
2204 2205 displaygraph(ui, revdag, displayer, showparents,
2205 2206 graphmod.asciiedges, getrenamed, filematcher)
2206 2207
2207 2208 def checkunsupportedgraphflags(pats, opts):
2208 2209 for op in ["newest_first"]:
2209 2210 if op in opts and opts[op]:
2210 2211 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2211 2212 % op.replace("_", "-"))
2212 2213
2213 2214 def graphrevs(repo, nodes, opts):
2214 2215 limit = loglimit(opts)
2215 2216 nodes.reverse()
2216 2217 if limit is not None:
2217 2218 nodes = nodes[:limit]
2218 2219 return graphmod.nodes(repo, nodes)
2219 2220
2220 2221 def add(ui, repo, match, prefix, explicitonly, **opts):
2221 2222 join = lambda f: os.path.join(prefix, f)
2222 2223 bad = []
2223 2224
2224 2225 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2225 2226 names = []
2226 2227 wctx = repo[None]
2227 2228 cca = None
2228 2229 abort, warn = scmutil.checkportabilityalert(ui)
2229 2230 if abort or warn:
2230 2231 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2231 2232
2232 2233 badmatch = matchmod.badmatch(match, badfn)
2233 2234 dirstate = repo.dirstate
2234 2235 # We don't want to just call wctx.walk here, since it would return a lot of
2235 2236 # clean files, which we aren't interested in, and doing so takes time.
2236 2237 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2237 2238 True, False, full=False)):
2238 2239 exact = match.exact(f)
2239 2240 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2240 2241 if cca:
2241 2242 cca(f)
2242 2243 names.append(f)
2243 2244 if ui.verbose or not exact:
2244 2245 ui.status(_('adding %s\n') % match.rel(f))
2245 2246
2246 2247 for subpath in sorted(wctx.substate):
2247 2248 sub = wctx.sub(subpath)
2248 2249 try:
2249 2250 submatch = matchmod.narrowmatcher(subpath, match)
2250 2251 if opts.get('subrepos'):
2251 2252 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2252 2253 else:
2253 2254 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2254 2255 except error.LookupError:
2255 2256 ui.status(_("skipping missing subrepository: %s\n")
2256 2257 % join(subpath))
2257 2258
2258 2259 if not opts.get('dry_run'):
2259 2260 rejected = wctx.add(names, prefix)
2260 2261 bad.extend(f for f in rejected if f in match.files())
2261 2262 return bad
2262 2263
2263 2264 def forget(ui, repo, match, prefix, explicitonly):
2264 2265 join = lambda f: os.path.join(prefix, f)
2265 2266 bad = []
2266 2267 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2267 2268 wctx = repo[None]
2268 2269 forgot = []
2269 2270
2270 2271 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2271 2272 forget = sorted(s[0] + s[1] + s[3] + s[6])
2272 2273 if explicitonly:
2273 2274 forget = [f for f in forget if match.exact(f)]
2274 2275
2275 2276 for subpath in sorted(wctx.substate):
2276 2277 sub = wctx.sub(subpath)
2277 2278 try:
2278 2279 submatch = matchmod.narrowmatcher(subpath, match)
2279 2280 subbad, subforgot = sub.forget(submatch, prefix)
2280 2281 bad.extend([subpath + '/' + f for f in subbad])
2281 2282 forgot.extend([subpath + '/' + f for f in subforgot])
2282 2283 except error.LookupError:
2283 2284 ui.status(_("skipping missing subrepository: %s\n")
2284 2285 % join(subpath))
2285 2286
2286 2287 if not explicitonly:
2287 2288 for f in match.files():
2288 2289 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2289 2290 if f not in forgot:
2290 2291 if repo.wvfs.exists(f):
2291 2292 # Don't complain if the exact case match wasn't given.
2292 2293 # But don't do this until after checking 'forgot', so
2293 2294 # that subrepo files aren't normalized, and this op is
2294 2295 # purely from data cached by the status walk above.
2295 2296 if repo.dirstate.normalize(f) in repo.dirstate:
2296 2297 continue
2297 2298 ui.warn(_('not removing %s: '
2298 2299 'file is already untracked\n')
2299 2300 % match.rel(f))
2300 2301 bad.append(f)
2301 2302
2302 2303 for f in forget:
2303 2304 if ui.verbose or not match.exact(f):
2304 2305 ui.status(_('removing %s\n') % match.rel(f))
2305 2306
2306 2307 rejected = wctx.forget(forget, prefix)
2307 2308 bad.extend(f for f in rejected if f in match.files())
2308 2309 forgot.extend(f for f in forget if f not in rejected)
2309 2310 return bad, forgot
2310 2311
2311 2312 def files(ui, ctx, m, fm, fmt, subrepos):
2312 2313 rev = ctx.rev()
2313 2314 ret = 1
2314 2315 ds = ctx.repo().dirstate
2315 2316
2316 2317 for f in ctx.matches(m):
2317 2318 if rev is None and ds[f] == 'r':
2318 2319 continue
2319 2320 fm.startitem()
2320 2321 if ui.verbose:
2321 2322 fc = ctx[f]
2322 2323 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2323 2324 fm.data(abspath=f)
2324 2325 fm.write('path', fmt, m.rel(f))
2325 2326 ret = 0
2326 2327
2327 2328 for subpath in sorted(ctx.substate):
2328 2329 def matchessubrepo(subpath):
2329 2330 return (m.always() or m.exact(subpath)
2330 2331 or any(f.startswith(subpath + '/') for f in m.files()))
2331 2332
2332 2333 if subrepos or matchessubrepo(subpath):
2333 2334 sub = ctx.sub(subpath)
2334 2335 try:
2335 2336 submatch = matchmod.narrowmatcher(subpath, m)
2336 2337 if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
2337 2338 ret = 0
2338 2339 except error.LookupError:
2339 2340 ui.status(_("skipping missing subrepository: %s\n")
2340 2341 % m.abs(subpath))
2341 2342
2342 2343 return ret
2343 2344
2344 2345 def remove(ui, repo, m, prefix, after, force, subrepos):
2345 2346 join = lambda f: os.path.join(prefix, f)
2346 2347 ret = 0
2347 2348 s = repo.status(match=m, clean=True)
2348 2349 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2349 2350
2350 2351 wctx = repo[None]
2351 2352
2352 2353 for subpath in sorted(wctx.substate):
2353 2354 def matchessubrepo(matcher, subpath):
2354 2355 if matcher.exact(subpath):
2355 2356 return True
2356 2357 for f in matcher.files():
2357 2358 if f.startswith(subpath):
2358 2359 return True
2359 2360 return False
2360 2361
2361 2362 if subrepos or matchessubrepo(m, subpath):
2362 2363 sub = wctx.sub(subpath)
2363 2364 try:
2364 2365 submatch = matchmod.narrowmatcher(subpath, m)
2365 2366 if sub.removefiles(submatch, prefix, after, force, subrepos):
2366 2367 ret = 1
2367 2368 except error.LookupError:
2368 2369 ui.status(_("skipping missing subrepository: %s\n")
2369 2370 % join(subpath))
2370 2371
2371 2372 # warn about failure to delete explicit files/dirs
2372 2373 deleteddirs = util.dirs(deleted)
2373 2374 for f in m.files():
2374 2375 def insubrepo():
2375 2376 for subpath in wctx.substate:
2376 2377 if f.startswith(subpath):
2377 2378 return True
2378 2379 return False
2379 2380
2380 2381 isdir = f in deleteddirs or wctx.hasdir(f)
2381 2382 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2382 2383 continue
2383 2384
2384 2385 if repo.wvfs.exists(f):
2385 2386 if repo.wvfs.isdir(f):
2386 2387 ui.warn(_('not removing %s: no tracked files\n')
2387 2388 % m.rel(f))
2388 2389 else:
2389 2390 ui.warn(_('not removing %s: file is untracked\n')
2390 2391 % m.rel(f))
2391 2392 # missing files will generate a warning elsewhere
2392 2393 ret = 1
2393 2394
2394 2395 if force:
2395 2396 list = modified + deleted + clean + added
2396 2397 elif after:
2397 2398 list = deleted
2398 2399 for f in modified + added + clean:
2399 2400 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2400 2401 ret = 1
2401 2402 else:
2402 2403 list = deleted + clean
2403 2404 for f in modified:
2404 2405 ui.warn(_('not removing %s: file is modified (use -f'
2405 2406 ' to force removal)\n') % m.rel(f))
2406 2407 ret = 1
2407 2408 for f in added:
2408 2409 ui.warn(_('not removing %s: file has been marked for add'
2409 2410 ' (use forget to undo)\n') % m.rel(f))
2410 2411 ret = 1
2411 2412
2412 2413 for f in sorted(list):
2413 2414 if ui.verbose or not m.exact(f):
2414 2415 ui.status(_('removing %s\n') % m.rel(f))
2415 2416
2416 2417 wlock = repo.wlock()
2417 2418 try:
2418 2419 if not after:
2419 2420 for f in list:
2420 2421 if f in added:
2421 2422 continue # we never unlink added files on remove
2422 2423 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2423 2424 repo[None].forget(list)
2424 2425 finally:
2425 2426 wlock.release()
2426 2427
2427 2428 return ret
2428 2429
2429 2430 def cat(ui, repo, ctx, matcher, prefix, **opts):
2430 2431 err = 1
2431 2432
2432 2433 def write(path):
2433 2434 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2434 2435 pathname=os.path.join(prefix, path))
2435 2436 data = ctx[path].data()
2436 2437 if opts.get('decode'):
2437 2438 data = repo.wwritedata(path, data)
2438 2439 fp.write(data)
2439 2440 fp.close()
2440 2441
2441 2442 # Automation often uses hg cat on single files, so special case it
2442 2443 # for performance to avoid the cost of parsing the manifest.
2443 2444 if len(matcher.files()) == 1 and not matcher.anypats():
2444 2445 file = matcher.files()[0]
2445 2446 mf = repo.manifest
2446 2447 mfnode = ctx.manifestnode()
2447 2448 if mfnode and mf.find(mfnode, file)[0]:
2448 2449 write(file)
2449 2450 return 0
2450 2451
2451 2452 # Don't warn about "missing" files that are really in subrepos
2452 2453 def badfn(path, msg):
2453 2454 for subpath in ctx.substate:
2454 2455 if path.startswith(subpath):
2455 2456 return
2456 2457 matcher.bad(path, msg)
2457 2458
2458 2459 for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
2459 2460 write(abs)
2460 2461 err = 0
2461 2462
2462 2463 for subpath in sorted(ctx.substate):
2463 2464 sub = ctx.sub(subpath)
2464 2465 try:
2465 2466 submatch = matchmod.narrowmatcher(subpath, matcher)
2466 2467
2467 2468 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2468 2469 **opts):
2469 2470 err = 0
2470 2471 except error.RepoLookupError:
2471 2472 ui.status(_("skipping missing subrepository: %s\n")
2472 2473 % os.path.join(prefix, subpath))
2473 2474
2474 2475 return err
2475 2476
2476 2477 def commit(ui, repo, commitfunc, pats, opts):
2477 2478 '''commit the specified files or all outstanding changes'''
2478 2479 date = opts.get('date')
2479 2480 if date:
2480 2481 opts['date'] = util.parsedate(date)
2481 2482 message = logmessage(ui, opts)
2482 2483 matcher = scmutil.match(repo[None], pats, opts)
2483 2484
2484 2485 # extract addremove carefully -- this function can be called from a command
2485 2486 # that doesn't support addremove
2486 2487 if opts.get('addremove'):
2487 2488 if scmutil.addremove(repo, matcher, "", opts) != 0:
2488 2489 raise error.Abort(
2489 2490 _("failed to mark all new/missing files as added/removed"))
2490 2491
2491 2492 return commitfunc(ui, repo, message, matcher, opts)
2492 2493
2493 2494 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2494 2495 # avoid cycle context -> subrepo -> cmdutil
2495 2496 import context
2496 2497
2497 2498 # amend will reuse the existing user if not specified, but the obsolete
2498 2499 # marker creation requires that the current user's name is specified.
2499 2500 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2500 2501 ui.username() # raise exception if username not set
2501 2502
2502 2503 ui.note(_('amending changeset %s\n') % old)
2503 2504 base = old.p1()
2504 2505 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2505 2506
2506 2507 wlock = lock = newid = None
2507 2508 try:
2508 2509 wlock = repo.wlock()
2509 2510 lock = repo.lock()
2510 2511 tr = repo.transaction('amend')
2511 2512 try:
2512 2513 # See if we got a message from -m or -l, if not, open the editor
2513 2514 # with the message of the changeset to amend
2514 2515 message = logmessage(ui, opts)
2515 2516 # ensure logfile does not conflict with later enforcement of the
2516 2517 # message. potential logfile content has been processed by
2517 2518 # `logmessage` anyway.
2518 2519 opts.pop('logfile')
2519 2520 # First, do a regular commit to record all changes in the working
2520 2521 # directory (if there are any)
2521 2522 ui.callhooks = False
2522 2523 activebookmark = repo._activebookmark
2523 2524 try:
2524 2525 repo._activebookmark = None
2525 2526 opts['message'] = 'temporary amend commit for %s' % old
2526 2527 node = commit(ui, repo, commitfunc, pats, opts)
2527 2528 finally:
2528 2529 repo._activebookmark = activebookmark
2529 2530 ui.callhooks = True
2530 2531 ctx = repo[node]
2531 2532
2532 2533 # Participating changesets:
2533 2534 #
2534 2535 # node/ctx o - new (intermediate) commit that contains changes
2535 2536 # | from working dir to go into amending commit
2536 2537 # | (or a workingctx if there were no changes)
2537 2538 # |
2538 2539 # old o - changeset to amend
2539 2540 # |
2540 2541 # base o - parent of amending changeset
2541 2542
2542 2543 # Update extra dict from amended commit (e.g. to preserve graft
2543 2544 # source)
2544 2545 extra.update(old.extra())
2545 2546
2546 2547 # Also update it from the intermediate commit or from the wctx
2547 2548 extra.update(ctx.extra())
2548 2549
2549 2550 if len(old.parents()) > 1:
2550 2551 # ctx.files() isn't reliable for merges, so fall back to the
2551 2552 # slower repo.status() method
2552 2553 files = set([fn for st in repo.status(base, old)[:3]
2553 2554 for fn in st])
2554 2555 else:
2555 2556 files = set(old.files())
2556 2557
2557 2558 # Second, we use either the commit we just did or, if there were no
2558 2559 # changes, the parent of the working directory as the version of the
2559 2560 # files in the final amend commit
2560 2561 if node:
2561 2562 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2562 2563
2563 2564 user = ctx.user()
2564 2565 date = ctx.date()
2565 2566 # Recompute copies (avoid recording a -> b -> a)
2566 2567 copied = copies.pathcopies(base, ctx)
2567 2568 if old.p2():
2568 2569 copied.update(copies.pathcopies(old.p2(), ctx))
2569 2570
2570 2571 # Prune files which were reverted by the updates: if old
2571 2572 # introduced file X and our intermediate commit, node,
2572 2573 # renamed that file, then those two files are the same and
2573 2574 # we can discard X from our list of files. Likewise if X
2574 2575 # was deleted, it's no longer relevant
2575 2576 files.update(ctx.files())
2576 2577
2577 2578 def samefile(f):
2578 2579 if f in ctx.manifest():
2579 2580 a = ctx.filectx(f)
2580 2581 if f in base.manifest():
2581 2582 b = base.filectx(f)
2582 2583 return (not a.cmp(b)
2583 2584 and a.flags() == b.flags())
2584 2585 else:
2585 2586 return False
2586 2587 else:
2587 2588 return f not in base.manifest()
2588 2589 files = [f for f in files if not samefile(f)]
2589 2590
2590 2591 def filectxfn(repo, ctx_, path):
2591 2592 try:
2592 2593 fctx = ctx[path]
2593 2594 flags = fctx.flags()
2594 2595 mctx = context.memfilectx(repo,
2595 2596 fctx.path(), fctx.data(),
2596 2597 islink='l' in flags,
2597 2598 isexec='x' in flags,
2598 2599 copied=copied.get(path))
2599 2600 return mctx
2600 2601 except KeyError:
2601 2602 return None
2602 2603 else:
2603 2604 ui.note(_('copying changeset %s to %s\n') % (old, base))
2604 2605
2605 2606 # Use version of files as in the old cset
2606 2607 def filectxfn(repo, ctx_, path):
2607 2608 try:
2608 2609 return old.filectx(path)
2609 2610 except KeyError:
2610 2611 return None
2611 2612
2612 2613 user = opts.get('user') or old.user()
2613 2614 date = opts.get('date') or old.date()
2614 2615 editform = mergeeditform(old, 'commit.amend')
2615 2616 editor = getcommiteditor(editform=editform, **opts)
2616 2617 if not message:
2617 2618 editor = getcommiteditor(edit=True, editform=editform)
2618 2619 message = old.description()
2619 2620
2620 2621 pureextra = extra.copy()
2621 2622 extra['amend_source'] = old.hex()
2622 2623
2623 2624 new = context.memctx(repo,
2624 2625 parents=[base.node(), old.p2().node()],
2625 2626 text=message,
2626 2627 files=files,
2627 2628 filectxfn=filectxfn,
2628 2629 user=user,
2629 2630 date=date,
2630 2631 extra=extra,
2631 2632 editor=editor)
2632 2633
2633 2634 newdesc = changelog.stripdesc(new.description())
2634 2635 if ((not node)
2635 2636 and newdesc == old.description()
2636 2637 and user == old.user()
2637 2638 and date == old.date()
2638 2639 and pureextra == old.extra()):
2639 2640 # nothing changed. continuing here would create a new node
2640 2641 # anyway because of the amend_source noise.
2641 2642 #
2642 2643 # This is not what we expect from amend.
2643 2644 return old.node()
2644 2645
2645 2646 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2646 2647 try:
2647 2648 if opts.get('secret'):
2648 2649 commitphase = 'secret'
2649 2650 else:
2650 2651 commitphase = old.phase()
2651 2652 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2652 2653 newid = repo.commitctx(new)
2653 2654 finally:
2654 2655 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2655 2656 if newid != old.node():
2656 2657 # Reroute the working copy parent to the new changeset
2657 2658 repo.setparents(newid, nullid)
2658 2659
2659 2660 # Move bookmarks from old parent to amend commit
2660 2661 bms = repo.nodebookmarks(old.node())
2661 2662 if bms:
2662 2663 marks = repo._bookmarks
2663 2664 for bm in bms:
2664 2665 ui.debug('moving bookmarks %r from %s to %s\n' %
2665 2666 (marks, old.hex(), hex(newid)))
2666 2667 marks[bm] = newid
2667 2668 marks.recordchange(tr)
2668 2669 # commit the whole amend process
2669 2670 if createmarkers:
2670 2671 # mark the new changeset as successor of the rewritten one
2671 2672 new = repo[newid]
2672 2673 obs = [(old, (new,))]
2673 2674 if node:
2674 2675 obs.append((ctx, ()))
2675 2676
2676 2677 obsolete.createmarkers(repo, obs)
2677 2678 tr.close()
2678 2679 finally:
2679 2680 tr.release()
2680 2681 if not createmarkers and newid != old.node():
2681 2682 # Strip the intermediate commit (if there was one) and the amended
2682 2683 # commit
2683 2684 if node:
2684 2685 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2685 2686 ui.note(_('stripping amended changeset %s\n') % old)
2686 2687 repair.strip(ui, repo, old.node(), topic='amend-backup')
2687 2688 finally:
2688 2689 lockmod.release(lock, wlock)
2689 2690 return newid
2690 2691
2691 2692 def commiteditor(repo, ctx, subs, editform=''):
2692 2693 if ctx.description():
2693 2694 return ctx.description()
2694 2695 return commitforceeditor(repo, ctx, subs, editform=editform,
2695 2696 unchangedmessagedetection=True)
2696 2697
2697 2698 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2698 2699 editform='', unchangedmessagedetection=False):
2699 2700 if not extramsg:
2700 2701 extramsg = _("Leave message empty to abort commit.")
2701 2702
2702 2703 forms = [e for e in editform.split('.') if e]
2703 2704 forms.insert(0, 'changeset')
2704 2705 templatetext = None
2705 2706 while forms:
2706 2707 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2707 2708 if tmpl:
2708 2709 templatetext = committext = buildcommittemplate(
2709 2710 repo, ctx, subs, extramsg, tmpl)
2710 2711 break
2711 2712 forms.pop()
2712 2713 else:
2713 2714 committext = buildcommittext(repo, ctx, subs, extramsg)
2714 2715
2715 2716 # run editor in the repository root
2716 2717 olddir = os.getcwd()
2717 2718 os.chdir(repo.root)
2718 2719
2719 2720 # make in-memory changes visible to external process
2720 2721 tr = repo.currenttransaction()
2721 2722 repo.dirstate.write(tr)
2722 2723 pending = tr and tr.writepending() and repo.root
2723 2724
2724 2725 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2725 2726 editform=editform, pending=pending)
2726 2727 text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
2727 2728 os.chdir(olddir)
2728 2729
2729 2730 if finishdesc:
2730 2731 text = finishdesc(text)
2731 2732 if not text.strip():
2732 2733 raise error.Abort(_("empty commit message"))
2733 2734 if unchangedmessagedetection and editortext == templatetext:
2734 2735 raise error.Abort(_("commit message unchanged"))
2735 2736
2736 2737 return text
2737 2738
2738 2739 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2739 2740 ui = repo.ui
2740 2741 tmpl, mapfile = gettemplate(ui, tmpl, None)
2741 2742
2742 2743 try:
2743 2744 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2744 2745 except SyntaxError as inst:
2745 2746 raise error.Abort(inst.args[0])
2746 2747
2747 2748 for k, v in repo.ui.configitems('committemplate'):
2748 2749 if k != 'changeset':
2749 2750 t.t.cache[k] = v
2750 2751
2751 2752 if not extramsg:
2752 2753 extramsg = '' # ensure that extramsg is string
2753 2754
2754 2755 ui.pushbuffer()
2755 2756 t.show(ctx, extramsg=extramsg)
2756 2757 return ui.popbuffer()
2757 2758
2758 2759 def hgprefix(msg):
2759 2760 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2760 2761
2761 2762 def buildcommittext(repo, ctx, subs, extramsg):
2762 2763 edittext = []
2763 2764 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2764 2765 if ctx.description():
2765 2766 edittext.append(ctx.description())
2766 2767 edittext.append("")
2767 2768 edittext.append("") # Empty line between message and comments.
2768 2769 edittext.append(hgprefix(_("Enter commit message."
2769 2770 " Lines beginning with 'HG:' are removed.")))
2770 2771 edittext.append(hgprefix(extramsg))
2771 2772 edittext.append("HG: --")
2772 2773 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2773 2774 if ctx.p2():
2774 2775 edittext.append(hgprefix(_("branch merge")))
2775 2776 if ctx.branch():
2776 2777 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2777 2778 if bookmarks.isactivewdirparent(repo):
2778 2779 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2779 2780 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2780 2781 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2781 2782 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2782 2783 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2783 2784 if not added and not modified and not removed:
2784 2785 edittext.append(hgprefix(_("no files changed")))
2785 2786 edittext.append("")
2786 2787
2787 2788 return "\n".join(edittext)
2788 2789
2789 2790 def commitstatus(repo, node, branch, bheads=None, opts=None):
2790 2791 if opts is None:
2791 2792 opts = {}
2792 2793 ctx = repo[node]
2793 2794 parents = ctx.parents()
2794 2795
2795 2796 if (not opts.get('amend') and bheads and node not in bheads and not
2796 2797 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2797 2798 repo.ui.status(_('created new head\n'))
2798 2799 # The message is not printed for initial roots. For the other
2799 2800 # changesets, it is printed in the following situations:
2800 2801 #
2801 2802 # Par column: for the 2 parents with ...
2802 2803 # N: null or no parent
2803 2804 # B: parent is on another named branch
2804 2805 # C: parent is a regular non head changeset
2805 2806 # H: parent was a branch head of the current branch
2806 2807 # Msg column: whether we print "created new head" message
2807 2808 # In the following, it is assumed that there already exist some
2808 2809 # initial branch heads of the current branch, otherwise nothing is
2809 2810 # printed anyway.
2810 2811 #
2811 2812 # Par Msg Comment
2812 2813 # N N y additional topo root
2813 2814 #
2814 2815 # B N y additional branch root
2815 2816 # C N y additional topo head
2816 2817 # H N n usual case
2817 2818 #
2818 2819 # B B y weird additional branch root
2819 2820 # C B y branch merge
2820 2821 # H B n merge with named branch
2821 2822 #
2822 2823 # C C y additional head from merge
2823 2824 # C H n merge with a head
2824 2825 #
2825 2826 # H H n head merge: head count decreases
2826 2827
2827 2828 if not opts.get('close_branch'):
2828 2829 for r in parents:
2829 2830 if r.closesbranch() and r.branch() == branch:
2830 2831 repo.ui.status(_('reopening closed branch head %d\n') % r)
2831 2832
2832 2833 if repo.ui.debugflag:
2833 2834 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2834 2835 elif repo.ui.verbose:
2835 2836 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2836 2837
2837 2838 def revert(ui, repo, ctx, parents, *pats, **opts):
2838 2839 parent, p2 = parents
2839 2840 node = ctx.node()
2840 2841
2841 2842 mf = ctx.manifest()
2842 2843 if node == p2:
2843 2844 parent = p2
2844 2845 if node == parent:
2845 2846 pmf = mf
2846 2847 else:
2847 2848 pmf = None
2848 2849
2849 2850 # need all matching names in dirstate and manifest of target rev,
2850 2851 # so have to walk both. do not print errors if files exist in one
2851 2852 # but not the other. in both cases, filesets should be evaluated against
2852 2853 # workingctx to get consistent result (issue4497). this means 'set:**'
2853 2854 # cannot be used to select missing files from target rev.
2854 2855
2855 2856 # `names` is a mapping for all elements in working copy and target revision
2856 2857 # The mapping is in the form:
2857 2858 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2858 2859 names = {}
2859 2860
2860 2861 wlock = repo.wlock()
2861 2862 try:
2862 2863 ## filling of the `names` mapping
2863 2864 # walk dirstate to fill `names`
2864 2865
2865 2866 interactive = opts.get('interactive', False)
2866 2867 wctx = repo[None]
2867 2868 m = scmutil.match(wctx, pats, opts)
2868 2869
2869 2870 # we'll need this later
2870 2871 targetsubs = sorted(s for s in wctx.substate if m(s))
2871 2872
2872 2873 if not m.always():
2873 2874 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2874 2875 names[abs] = m.rel(abs), m.exact(abs)
2875 2876
2876 2877 # walk target manifest to fill `names`
2877 2878
2878 2879 def badfn(path, msg):
2879 2880 if path in names:
2880 2881 return
2881 2882 if path in ctx.substate:
2882 2883 return
2883 2884 path_ = path + '/'
2884 2885 for f in names:
2885 2886 if f.startswith(path_):
2886 2887 return
2887 2888 ui.warn("%s: %s\n" % (m.rel(path), msg))
2888 2889
2889 2890 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2890 2891 if abs not in names:
2891 2892 names[abs] = m.rel(abs), m.exact(abs)
2892 2893
2893 2894 # Find status of all files in `names`.
2894 2895 m = scmutil.matchfiles(repo, names)
2895 2896
2896 2897 changes = repo.status(node1=node, match=m,
2897 2898 unknown=True, ignored=True, clean=True)
2898 2899 else:
2899 2900 changes = repo.status(node1=node, match=m)
2900 2901 for kind in changes:
2901 2902 for abs in kind:
2902 2903 names[abs] = m.rel(abs), m.exact(abs)
2903 2904
2904 2905 m = scmutil.matchfiles(repo, names)
2905 2906
2906 2907 modified = set(changes.modified)
2907 2908 added = set(changes.added)
2908 2909 removed = set(changes.removed)
2909 2910 _deleted = set(changes.deleted)
2910 2911 unknown = set(changes.unknown)
2911 2912 unknown.update(changes.ignored)
2912 2913 clean = set(changes.clean)
2913 2914 modadded = set()
2914 2915
2915 2916 # split between files known in target manifest and the others
2916 2917 smf = set(mf)
2917 2918
2918 2919 # determine the exact nature of the deleted files
2919 2920 deladded = _deleted - smf
2920 2921 deleted = _deleted - deladded
2921 2922
2922 2923 # We need to account for the state of the file in the dirstate,
2923 2924 # even when we revert against something other than the parent. This will
2924 2925 # slightly alter the behavior of revert (doing a backup or not, delete
2925 2926 # or just forget etc).
2926 2927 if parent == node:
2927 2928 dsmodified = modified
2928 2929 dsadded = added
2929 2930 dsremoved = removed
2930 2931 # store all local modifications, useful later for rename detection
2931 2932 localchanges = dsmodified | dsadded
2932 2933 modified, added, removed = set(), set(), set()
2933 2934 else:
2934 2935 changes = repo.status(node1=parent, match=m)
2935 2936 dsmodified = set(changes.modified)
2936 2937 dsadded = set(changes.added)
2937 2938 dsremoved = set(changes.removed)
2938 2939 # store all local modifications, useful later for rename detection
2939 2940 localchanges = dsmodified | dsadded
2940 2941
2941 2942 # only take removes between wc and target into account
2942 2943 clean |= dsremoved - removed
2943 2944 dsremoved &= removed
2944 2945 # distinguish between dirstate removes and the others
2945 2946 removed -= dsremoved
2946 2947
2947 2948 modadded = added & dsmodified
2948 2949 added -= modadded
2949 2950
2950 2951 # tell newly modified files apart.
2951 2952 dsmodified &= modified
2952 2953 dsmodified |= modified & dsadded # dirstate added may need backup
2953 2954 modified -= dsmodified
2954 2955
2955 2956 # We need to wait for some post-processing to update this set
2956 2957 # before making the distinction. The dirstate will be used for
2957 2958 # that purpose.
2958 2959 dsadded = added
2959 2960
2960 2961 # in case of merge, files that are actually added can be reported as
2961 2962 # modified; we need to post-process the result
2962 2963 if p2 != nullid:
2963 2964 if pmf is None:
2964 2965 # only need parent manifest in the merge case,
2965 2966 # so do not read by default
2966 2967 pmf = repo[parent].manifest()
2967 2968 mergeadd = dsmodified - set(pmf)
2968 2969 dsadded |= mergeadd
2969 2970 dsmodified -= mergeadd
2970 2971
2971 2972 # if f is a rename, update `names` to also revert the source
2972 2973 cwd = repo.getcwd()
2973 2974 for f in localchanges:
2974 2975 src = repo.dirstate.copied(f)
2975 2976 # XXX should we check for rename down to target node?
2976 2977 if src and src not in names and repo.dirstate[src] == 'r':
2977 2978 dsremoved.add(src)
2978 2979 names[src] = (repo.pathto(src, cwd), True)
2979 2980
2980 2981 # distinguish between files to forget and the others
2981 2982 added = set()
2982 2983 for abs in dsadded:
2983 2984 if repo.dirstate[abs] != 'a':
2984 2985 added.add(abs)
2985 2986 dsadded -= added
2986 2987
2987 2988 for abs in deladded:
2988 2989 if repo.dirstate[abs] == 'a':
2989 2990 dsadded.add(abs)
2990 2991 deladded -= dsadded
2991 2992
2992 2993 # For files marked as removed, we check if an unknown file is present at
2993 2994 # the same path. If such a file exists, it may need to be backed up.
2994 2995 # Making the distinction at this stage helps have simpler backup
2995 2996 # logic.
2996 2997 removunk = set()
2997 2998 for abs in removed:
2998 2999 target = repo.wjoin(abs)
2999 3000 if os.path.lexists(target):
3000 3001 removunk.add(abs)
3001 3002 removed -= removunk
3002 3003
3003 3004 dsremovunk = set()
3004 3005 for abs in dsremoved:
3005 3006 target = repo.wjoin(abs)
3006 3007 if os.path.lexists(target):
3007 3008 dsremovunk.add(abs)
3008 3009 dsremoved -= dsremovunk
3009 3010
3010 3011 # actions to be actually performed by revert
3011 3012 # (<list of files>, <message>) tuple
3012 3013 actions = {'revert': ([], _('reverting %s\n')),
3013 3014 'add': ([], _('adding %s\n')),
3014 3015 'remove': ([], _('removing %s\n')),
3015 3016 'drop': ([], _('removing %s\n')),
3016 3017 'forget': ([], _('forgetting %s\n')),
3017 3018 'undelete': ([], _('undeleting %s\n')),
3018 3019 'noop': (None, _('no changes needed to %s\n')),
3019 3020 'unknown': (None, _('file not managed: %s\n')),
3020 3021 }
3021 3022
3022 3023 # "constant" that convey the backup strategy.
3023 3024 # All set to `discard` if `no-backup` is set do avoid checking
3024 3025 # no_backup lower in the code.
3025 3026 # These values are ordered for comparison purposes
3026 3027 backup = 2 # unconditionally do backup
3027 3028 check = 1 # check if the existing file differs from target
3028 3029 discard = 0 # never do backup
3029 3030 if opts.get('no_backup'):
3030 3031 backup = check = discard
3031 3032
3032 3033 backupanddel = actions['remove']
3033 3034 if not opts.get('no_backup'):
3034 3035 backupanddel = actions['drop']
3035 3036
3036 3037 disptable = (
3037 3038 # dispatch table:
3038 3039 # file state
3039 3040 # action
3040 3041 # make backup
3041 3042
3042 3043 ## Sets that will result in file changes on disk
3043 3044 # Modified compared to target, no local change
3044 3045 (modified, actions['revert'], discard),
3045 3046 # Modified compared to target, but local file is deleted
3046 3047 (deleted, actions['revert'], discard),
3047 3048 # Modified compared to target, local change
3048 3049 (dsmodified, actions['revert'], backup),
3049 3050 # Added since target
3050 3051 (added, actions['remove'], discard),
3051 3052 # Added in working directory
3052 3053 (dsadded, actions['forget'], discard),
3053 3054 # Added since target, have local modification
3054 3055 (modadded, backupanddel, backup),
3055 3056 # Added since target but file is missing in working directory
3056 3057 (deladded, actions['drop'], discard),
3057 3058 # Removed since target, before working copy parent
3058 3059 (removed, actions['add'], discard),
3059 3060 # Same as `removed` but an unknown file exists at the same path
3060 3061 (removunk, actions['add'], check),
3061 3062 # Removed since target, marked as such in working copy parent
3062 3063 (dsremoved, actions['undelete'], discard),
3063 3064 # Same as `dsremoved` but an unknown file exists at the same path
3064 3065 (dsremovunk, actions['undelete'], check),
3065 3066 ## the following sets do not result in any file changes
3066 3067 # File with no modification
3067 3068 (clean, actions['noop'], discard),
3068 3069 # Existing file, not tracked anywhere
3069 3070 (unknown, actions['unknown'], discard),
3070 3071 )
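# Reading one row of the table above as a worked example (descriptive note,
# not part of the original source): a path in ``dsmodified`` (locally modified
# relative to the revert target) is appended to actions['revert'] with
# dobackup == backup, i.e. it is reverted and, unless --no-backup was given,
# the current version is first saved away via origpath().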
3071 3072
3072 3073 for abs, (rel, exact) in sorted(names.items()):
3073 3074 # target file to be touched on disk (relative to cwd)
3074 3075 target = repo.wjoin(abs)
3075 3076 # search for the entry in the dispatch table.
3076 3077 # if the file is in any of these sets, it was touched in the working
3077 3078 # directory parent and we are sure it needs to be reverted.
3078 3079 for table, (xlist, msg), dobackup in disptable:
3079 3080 if abs not in table:
3080 3081 continue
3081 3082 if xlist is not None:
3082 3083 xlist.append(abs)
3083 3084 if dobackup and (backup <= dobackup
3084 3085 or wctx[abs].cmp(ctx[abs])):
3085 3086 bakname = origpath(ui, repo, rel)
3086 3087 ui.note(_('saving current version of %s as %s\n') %
3087 3088 (rel, bakname))
3088 3089 if not opts.get('dry_run'):
3089 3090 if interactive:
3090 3091 util.copyfile(target, bakname)
3091 3092 else:
3092 3093 util.rename(target, bakname)
3093 3094 if ui.verbose or not exact:
3094 3095 if not isinstance(msg, basestring):
3095 3096 msg = msg(abs)
3096 3097 ui.status(msg % rel)
3097 3098 elif exact:
3098 3099 ui.warn(msg % rel)
3099 3100 break
3100 3101
3101 3102 if not opts.get('dry_run'):
3102 3103 needdata = ('revert', 'add', 'undelete')
3103 3104 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3104 3105 _performrevert(repo, parents, ctx, actions, interactive)
3105 3106
3106 3107 if targetsubs:
3107 3108 # Revert the subrepos on the revert list
3108 3109 for sub in targetsubs:
3109 3110 try:
3110 3111 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3111 3112 except KeyError:
3112 3113 raise error.Abort("subrepository '%s' does not exist in %s!"
3113 3114 % (sub, short(ctx.node())))
3114 3115 finally:
3115 3116 wlock.release()
3116 3117
3117 3118 def origpath(ui, repo, filepath):
3118 3119 '''customize where .orig files are created
3119 3120
3120 3121 Fetch the user-defined path from the config file: [ui] origbackuppath = <path>
3121 3122 Fall back to the default (filepath + ".orig") if not specified
3122 3123 '''
3123 3124 origbackuppath = ui.config('ui', 'origbackuppath', None)
3124 3125 if origbackuppath is None:
3125 3126 return filepath + ".orig"
3126 3127
3127 3128 filepathfromroot = os.path.relpath(filepath, start=repo.root)
3128 3129 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
3129 3130
3130 3131 origbackupdir = repo.vfs.dirname(fullorigpath)
3131 3132 if not repo.vfs.exists(origbackupdir):
3132 3133 ui.note(_('creating directory: %s\n') % origbackupdir)
3133 3134 util.makedirs(origbackupdir)
3134 3135
3135 3136 return fullorigpath + ".orig"
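# A hedged illustration of the config hook above (the path value is made up):
# with
#
#   [ui]
#   origbackuppath = .hg/origbackups
#
# reverting "a/b.txt" would save the backup as ".hg/origbackups/a/b.txt.orig"
# under the repository root instead of "a/b.txt.orig" next to the file;
# without the setting, origpath() simply returns filepath + ".orig".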
3136 3137
3137 3138 def _revertprefetch(repo, ctx, *files):
3138 3139 """Let extension changing the storage layer prefetch content"""
3139 3140 pass
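# A speculative sketch of how an extension might hook the no-op above via
# extensions.wrapfunction(); the fetcher object is hypothetical:
#
#   def wrappedprefetch(orig, repo, ctx, *files):
#       myfetcher.prefetch(repo, ctx, files)   # batch-fetch file contents
#       return orig(repo, ctx, *files)
#
#   extensions.wrapfunction(cmdutil, '_revertprefetch', wrappedprefetch)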
3140 3141
3141 3142 def _performrevert(repo, parents, ctx, actions, interactive=False):
3142 3143 """function that actually perform all the actions computed for revert
3143 3144
3144 3145 This is an independent function to let extension to plug in and react to
3145 3146 the imminent revert.
3146 3147
3147 3148 Make sure you have the working directory locked when calling this function.
3148 3149 """
3149 3150 parent, p2 = parents
3150 3151 node = ctx.node()
3151 3152 def checkout(f):
3152 3153 fc = ctx[f]
3153 3154 repo.wwrite(f, fc.data(), fc.flags())
3154 3155
3155 3156 audit_path = pathutil.pathauditor(repo.root)
3156 3157 for f in actions['forget'][0]:
3157 3158 repo.dirstate.drop(f)
3158 3159 for f in actions['remove'][0]:
3159 3160 audit_path(f)
3160 3161 try:
3161 3162 util.unlinkpath(repo.wjoin(f))
3162 3163 except OSError:
3163 3164 pass
3164 3165 repo.dirstate.remove(f)
3165 3166 for f in actions['drop'][0]:
3166 3167 audit_path(f)
3167 3168 repo.dirstate.remove(f)
3168 3169
3169 3170 normal = None
3170 3171 if node == parent:
3171 3172 # We're reverting to our parent. If possible, we'd like status
3172 3173 # to report the file as clean. We have to use normallookup for
3173 3174 # merges to avoid losing information about merged/dirty files.
3174 3175 if p2 != nullid:
3175 3176 normal = repo.dirstate.normallookup
3176 3177 else:
3177 3178 normal = repo.dirstate.normal
3178 3179
3179 3180 newlyaddedandmodifiedfiles = set()
3180 3181 if interactive:
3181 3182 # Prompt the user for changes to revert
3182 3183 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3183 3184 m = scmutil.match(ctx, torevert, {})
3184 3185 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3185 3186 diffopts.nodates = True
3186 3187 diffopts.git = True
3187 3188 reversehunks = repo.ui.configbool('experimental',
3188 3189 'revertalternateinteractivemode',
3189 3190 True)
3190 3191 if reversehunks:
3191 3192 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3192 3193 else:
3193 3194 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3194 3195 originalchunks = patch.parsepatch(diff)
3195 3196
3196 3197 try:
3197 3198
3198 3199 chunks = recordfilter(repo.ui, originalchunks)
3199 3200 if reversehunks:
3200 3201 chunks = patch.reversehunks(chunks)
3201 3202
3202 3203 except patch.PatchError as err:
3203 3204 raise error.Abort(_('error parsing patch: %s') % err)
3204 3205
3205 3206 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3206 3207 # Apply changes
3207 3208 fp = cStringIO.StringIO()
3208 3209 for c in chunks:
3209 3210 c.write(fp)
3210 3211 dopatch = fp.tell()
3211 3212 fp.seek(0)
3212 3213 if dopatch:
3213 3214 try:
3214 3215 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3215 3216 except patch.PatchError as err:
3216 3217 raise error.Abort(str(err))
3217 3218 del fp
3218 3219 else:
3219 3220 for f in actions['revert'][0]:
3220 3221 checkout(f)
3221 3222 if normal:
3222 3223 normal(f)
3223 3224
3224 3225 for f in actions['add'][0]:
3225 3226 # Don't check out modified files; they are already created by the diff
3226 3227 if f not in newlyaddedandmodifiedfiles:
3227 3228 checkout(f)
3228 3229 repo.dirstate.add(f)
3229 3230
3230 3231 normal = repo.dirstate.normallookup
3231 3232 if node == parent and p2 == nullid:
3232 3233 normal = repo.dirstate.normal
3233 3234 for f in actions['undelete'][0]:
3234 3235 checkout(f)
3235 3236 normal(f)
3236 3237
3237 3238 copied = copies.pathcopies(repo[parent], ctx)
3238 3239
3239 3240 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3240 3241 if f in copied:
3241 3242 repo.dirstate.copy(copied[f], f)
3242 3243
3243 3244 def command(table):
3244 3245 """Returns a function object to be used as a decorator for making commands.
3245 3246
3246 3247 This function receives a command table as its argument. The table should
3247 3248 be a dict.
3248 3249
3249 3250 The returned function can be used as a decorator for adding commands
3250 3251 to that command table. This function accepts multiple arguments to define
3251 3252 a command.
3252 3253
3253 3254 The first argument is the command name.
3254 3255
3255 3256 The options argument is an iterable of tuples defining command arguments.
3256 3257 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3257 3258
3258 3259 The synopsis argument defines a short, one line summary of how to use the
3259 3260 command. This shows up in the help output.
3260 3261
3261 3262 The norepo argument defines whether the command does not require a
3262 3263 local repository. Most commands operate against a repository, thus the
3263 3264 default is False.
3264 3265
3265 3266 The optionalrepo argument defines whether the command optionally requires
3266 3267 a local repository.
3267 3268
3268 3269 The inferrepo argument defines whether to try to find a repository from the
3269 3270 command line arguments. If True, arguments will be examined for potential
3270 3271 repository locations. See ``findrepo()``. If a repository is found, it
3271 3272 will be used.
3272 3273 """
3273 3274 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3274 3275 inferrepo=False):
3275 3276 def decorator(func):
3276 3277 if synopsis:
3277 3278 table[name] = func, list(options), synopsis
3278 3279 else:
3279 3280 table[name] = func, list(options)
3280 3281
3281 3282 if norepo:
3282 3283 # Avoid import cycle.
3283 3284 import commands
3284 3285 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3285 3286
3286 3287 if optionalrepo:
3287 3288 import commands
3288 3289 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3289 3290
3290 3291 if inferrepo:
3291 3292 import commands
3292 3293 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3293 3294
3294 3295 return func
3295 3296 return decorator
3296 3297
3297 3298 return cmd
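# A minimal usage sketch, mirroring how extensions typically consume this
# factory (the command name and option below are illustrative only):
#
#   cmdtable = {}
#   command = cmdutil.command(cmdtable)
#
#   @command('hello', [('g', 'greeting', 'Hello', _('greeting to use'))],
#            _('hg hello [-g TEXT]'))
#   def hello(ui, repo, **opts):
#       ui.write('%s from %s\n' % (opts['greeting'], repo.root))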
3298 3299
3299 3300 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3300 3301 # commands.outgoing. "missing" is the "missing" attribute of the result
3301 3302 # of "findcommonoutgoing()"
3302 3303 outgoinghooks = util.hooks()
3303 3304
3304 3305 # a list of (ui, repo) functions called by commands.summary
3305 3306 summaryhooks = util.hooks()
3306 3307
3307 3308 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3308 3309 #
3309 3310 # functions should return a tuple of the booleans below, if 'changes' is None:
3310 3311 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3311 3312 #
3312 3313 # otherwise, 'changes' is a tuple of tuples below:
3313 3314 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3314 3315 # - (desturl, destbranch, destpeer, outgoing)
3315 3316 summaryremotehooks = util.hooks()
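# A hedged example of registering one of the hook points above from an
# extension's uisetup() (the hook body is illustrative):
#
#   def showroot(ui, repo):
#       ui.status('repository root: %s\n' % repo.root)
#
#   def uisetup(ui):
#       cmdutil.summaryhooks.add('myextension', showroot)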
3316 3317
3317 3318 # A list of state files kept by multistep operations like graft.
3318 3319 # Since graft cannot be aborted, it is considered 'clearable' by update.
3319 3320 # note: bisect is intentionally excluded
3320 3321 # (state file, clearable, allowcommit, error, hint)
3321 3322 unfinishedstates = [
3322 3323 ('graftstate', True, False, _('graft in progress'),
3323 3324 _("use 'hg graft --continue' or 'hg update' to abort")),
3324 3325 ('updatestate', True, False, _('last update was interrupted'),
3325 3326 _("use 'hg update' to get a consistent checkout"))
3326 3327 ]
3327 3328
3328 3329 def checkunfinished(repo, commit=False):
3329 3330 '''Look for an unfinished multistep operation, like graft, and abort
3330 3331 if found. It's probably good to check this right before
3331 3332 bailifchanged().
3332 3333 '''
3333 3334 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3334 3335 if commit and allowcommit:
3335 3336 continue
3336 3337 if repo.vfs.exists(f):
3337 3338 raise error.Abort(msg, hint=hint)
3338 3339
3339 3340 def clearunfinished(repo):
3340 3341 '''Check for unfinished operations (as above), and clear the ones
3341 3342 that are clearable.
3342 3343 '''
3343 3344 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3344 3345 if not clearable and repo.vfs.exists(f):
3345 3346 raise error.Abort(msg, hint=hint)
3346 3347 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3347 3348 if clearable and repo.vfs.exists(f):
3348 3349 util.unlink(repo.join(f))
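# A speculative sketch of how an extension driving its own multistep operation
# could register with the machinery above (state file, command, and hint are
# made up):
#
#   cmdutil.unfinishedstates.append(
#       ('mystate', False, False, _('my operation in progress'),
#        _("use 'hg myop --continue' or 'hg myop --abort'")))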
3349 3350
3350 3351 class dirstateguard(object):
3351 3352 '''Restore dirstate on unexpected failure.
3352 3353
3353 3354 At construction, this class:
3354 3355
3355 3356 - writes the current ``repo.dirstate`` out, and
3356 3357 - saves ``.hg/dirstate`` into a backup file
3357 3358
3358 3359 ``.hg/dirstate`` is restored from the backup file if ``release()``
3359 3360 is invoked before ``close()``.
3360 3361
3361 3362 If ``close()`` is called before ``release()``, the backup file is simply removed.
3362 3363 '''
3363 3364
3364 3365 def __init__(self, repo, name):
3365 3366 self._repo = repo
3366 3367 self._suffix = '.backup.%s.%d' % (name, id(self))
3367 3368 repo.dirstate._savebackup(repo.currenttransaction(), self._suffix)
3368 3369 self._active = True
3369 3370 self._closed = False
3370 3371
3371 3372 def __del__(self):
3372 3373 if self._active: # still active
3373 3374 # this may occur, even if this class is used correctly:
3374 3375 # for example, releasing other resources like the transaction
3375 3376 # may raise an exception before ``dirstateguard.release`` in
3376 3377 # ``release(tr, ....)``.
3377 3378 self._abort()
3378 3379
3379 3380 def close(self):
3380 3381 if not self._active: # already inactivated
3381 3382 msg = (_("can't close already inactivated backup: dirstate%s")
3382 3383 % self._suffix)
3383 3384 raise error.Abort(msg)
3384 3385
3385 3386 self._repo.dirstate._clearbackup(self._repo.currenttransaction(),
3386 3387 self._suffix)
3387 3388 self._active = False
3388 3389 self._closed = True
3389 3390
3390 3391 def _abort(self):
3391 3392 self._repo.dirstate._restorebackup(self._repo.currenttransaction(),
3392 3393 self._suffix)
3393 3394 self._active = False
3394 3395
3395 3396 def release(self):
3396 3397 if not self._closed:
3397 3398 if not self._active: # already inactivated
3398 3399 msg = (_("can't release already inactivated backup:"
3399 3400 " dirstate%s")
3400 3401 % self._suffix)
3401 3402 raise error.Abort(msg)
3402 3403 self._abort()
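# A minimal usage sketch for dirstateguard, assuming the caller already holds
# the wlock (the surrounding command code is hypothetical):
#
#   dsguard = dirstateguard(repo, 'mycommand')
#   try:
#       ...                 # mutate the dirstate / working directory
#       dsguard.close()     # success: keep the new dirstate
#   finally:
#       dsguard.release()   # failure path: restore the saved dirstate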