record: extract code to compute newly added and modified files...
Laurent Charignon -
r25257:07326d76 default
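This change lifts the loop that collects newly-added-and-modified filenames out of dorecord's recordfunc and into a standalone module-level helper, newandmodified(chunks, originalchunks), which the original site now calls. The sketch below restates that logic outside Mercurial so it can be run on its own; FakeHeader and FakeHunk are hypothetical stand-ins for the hunk objects produced by patch.parsepatch/crecord, and the ishunk type check from cmdutil is omitted because every item in the sketch is a hunk.

    # Minimal standalone sketch of what the extracted helper computes.
    # FakeHeader/FakeHunk are hypothetical stand-ins, not Mercurial classes.

    class FakeHeader(object):
        def __init__(self, filename, newfile):
            self._filename = filename
            self._newfile = newfile
        def isnewfile(self):
            return self._newfile
        def filename(self):
            return self._filename

    class FakeHunk(object):
        def __init__(self, header):
            self.header = header

    def newandmodified(chunks, originalchunks):
        # Collect names of files whose hunks introduce a new file and were not
        # part of the originally parsed patch (i.e. they appeared while the
        # interactive record session was running).
        newlyaddedandmodifiedfiles = set()
        for chunk in chunks:
            if chunk.header.isnewfile() and chunk not in originalchunks:
                newlyaddedandmodifiedfiles.add(chunk.header.filename())
        return newlyaddedandmodifiedfiles

    original = [FakeHunk(FakeHeader('a.txt', False))]
    filtered = original + [FakeHunk(FakeHeader('b.txt', True))]
    print(newandmodified(filtered, original))   # the helper reports only b.txt

In the diff below, the inline loop that previously lived in recordfunc (old lines 112-116) is removed, the helper is added at new lines 24-30, and the call site becomes the single assignment at new line 120.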
@@ -1,3333 +1,3337 @@
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO, shutil
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import context, repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import crecord as crecordmod
18 18 import lock as lockmod
19 19
20 20 def ishunk(x):
21 21 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
22 22 return isinstance(x, hunkclasses)
23 23
24 def newandmodified(chunks, originalchunks):
25 newlyaddedandmodifiedfiles = set()
26 for chunk in chunks:
27 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
28 originalchunks:
29 newlyaddedandmodifiedfiles.add(chunk.header.filename())
30 return newlyaddedandmodifiedfiles
31
24 32 def parsealiases(cmd):
25 33 return cmd.lstrip("^").split("|")
26 34
27 35 def setupwrapcolorwrite(ui):
28 36 # wrap ui.write so diff output can be labeled/colorized
29 37 def wrapwrite(orig, *args, **kw):
30 38 label = kw.pop('label', '')
31 39 for chunk, l in patch.difflabel(lambda: args):
32 40 orig(chunk, label=label + l)
33 41
34 42 oldwrite = ui.write
35 43 def wrap(*args, **kwargs):
36 44 return wrapwrite(oldwrite, *args, **kwargs)
37 45 setattr(ui, 'write', wrap)
38 46 return oldwrite
39 47
40 48 def filterchunks(ui, originalhunks, usecurses, testfile):
41 49 if usecurses:
42 50 if testfile:
43 51 recordfn = crecordmod.testdecorator(testfile,
44 52 crecordmod.testchunkselector)
45 53 else:
46 54 recordfn = crecordmod.chunkselector
47 55
48 56 return crecordmod.filterpatch(ui, originalhunks, recordfn)
49 57
50 58 else:
51 59 return patch.filterpatch(ui, originalhunks)
52 60
53 61 def recordfilter(ui, originalhunks):
54 62 usecurses = ui.configbool('experimental', 'crecord', False)
55 63 testfile = ui.config('experimental', 'crecordtest', None)
56 64 oldwrite = setupwrapcolorwrite(ui)
57 65 try:
58 66 newchunks = filterchunks(ui, originalhunks, usecurses, testfile)
59 67 finally:
60 68 ui.write = oldwrite
61 69 return newchunks
62 70
63 71 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
64 72 filterfn, *pats, **opts):
65 73 import merge as mergemod
66 74
67 75 if not ui.interactive():
68 76 raise util.Abort(_('running non-interactively, use %s instead') %
69 77 cmdsuggest)
70 78
71 79 # make sure username is set before going interactive
72 80 if not opts.get('user'):
73 81 ui.username() # raise exception, username not provided
74 82
75 83 def recordfunc(ui, repo, message, match, opts):
76 84 """This is generic record driver.
77 85
78 86 Its job is to interactively filter local changes, and
79 87 accordingly prepare working directory into a state in which the
80 88 job can be delegated to a non-interactive commit command such as
81 89 'commit' or 'qrefresh'.
82 90
83 91 After the actual job is done by non-interactive command, the
84 92 working directory is restored to its original state.
85 93
86 94 In the end we'll record interesting changes, and everything else
87 95 will be left in place, so the user can continue working.
88 96 """
89 97
90 98 checkunfinished(repo, commit=True)
91 99 merge = len(repo[None].parents()) > 1
92 100 if merge:
93 101 raise util.Abort(_('cannot partially commit a merge '
94 102 '(use "hg commit" instead)'))
95 103
96 104 status = repo.status(match=match)
97 105 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
98 106 diffopts.nodates = True
99 107 diffopts.git = True
100 108 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
101 109 originalchunks = patch.parsepatch(originaldiff)
102 110
103 111 # 1. filter patch, so we have intending-to apply subset of it
104 112 try:
105 113 chunks = filterfn(ui, originalchunks)
106 114 except patch.PatchError, err:
107 115 raise util.Abort(_('error parsing patch: %s') % err)
108 116
109 117 # We need to keep a backup of files that have been newly added and
110 118 # modified during the recording process because there is a previous
111 119 # version without the edit in the workdir
112 newlyaddedandmodifiedfiles = set()
113 for chunk in chunks:
114 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
115 originalchunks:
116 newlyaddedandmodifiedfiles.add(chunk.header.filename())
120 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
117 121 contenders = set()
118 122 for h in chunks:
119 123 try:
120 124 contenders.update(set(h.files()))
121 125 except AttributeError:
122 126 pass
123 127
124 128 changed = status.modified + status.added + status.removed
125 129 newfiles = [f for f in changed if f in contenders]
126 130 if not newfiles:
127 131 ui.status(_('no changes to record\n'))
128 132 return 0
129 133
130 134 modified = set(status.modified)
131 135
132 136 # 2. backup changed files, so we can restore them in the end
133 137
134 138 if backupall:
135 139 tobackup = changed
136 140 else:
137 141 tobackup = [f for f in newfiles if f in modified or f in \
138 142 newlyaddedandmodifiedfiles]
139 143 backups = {}
140 144 if tobackup:
141 145 backupdir = repo.join('record-backups')
142 146 try:
143 147 os.mkdir(backupdir)
144 148 except OSError, err:
145 149 if err.errno != errno.EEXIST:
146 150 raise
147 151 try:
148 152 # backup continues
149 153 for f in tobackup:
150 154 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
151 155 dir=backupdir)
152 156 os.close(fd)
153 157 ui.debug('backup %r as %r\n' % (f, tmpname))
154 158 util.copyfile(repo.wjoin(f), tmpname)
155 159 shutil.copystat(repo.wjoin(f), tmpname)
156 160 backups[f] = tmpname
157 161
158 162 fp = cStringIO.StringIO()
159 163 for c in chunks:
160 164 fname = c.filename()
161 165 if fname in backups:
162 166 c.write(fp)
163 167 dopatch = fp.tell()
164 168 fp.seek(0)
165 169
166 170 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
167 171 # 3a. apply filtered patch to clean repo (clean)
168 172 if backups:
169 173 # Equivalent to hg.revert
170 174 choices = lambda key: key in backups
171 175 mergemod.update(repo, repo.dirstate.p1(),
172 176 False, True, choices)
173 177
174 178 # 3b. (apply)
175 179 if dopatch:
176 180 try:
177 181 ui.debug('applying patch\n')
178 182 ui.debug(fp.getvalue())
179 183 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
180 184 except patch.PatchError, err:
181 185 raise util.Abort(str(err))
182 186 del fp
183 187
184 188 # 4. We prepared working directory according to filtered
185 189 # patch. Now is the time to delegate the job to
186 190 # commit/qrefresh or the like!
187 191
188 192 # Make all of the pathnames absolute.
189 193 newfiles = [repo.wjoin(nf) for nf in newfiles]
190 194 return commitfunc(ui, repo, *newfiles, **opts)
191 195 finally:
192 196 # 5. finally restore backed-up files
193 197 try:
194 198 for realname, tmpname in backups.iteritems():
195 199 ui.debug('restoring %r to %r\n' % (tmpname, realname))
196 200 util.copyfile(tmpname, repo.wjoin(realname))
197 201 # Our calls to copystat() here and above are a
198 202 # hack to trick any editors that have f open that
199 203 # we haven't modified them.
200 204 #
201 205 # Also note that this racy as an editor could
202 206 # notice the file's mtime before we've finished
203 207 # writing it.
204 208 shutil.copystat(tmpname, repo.wjoin(realname))
205 209 os.unlink(tmpname)
206 210 if tobackup:
207 211 os.rmdir(backupdir)
208 212 except OSError:
209 213 pass
210 214
211 215 return commit(ui, repo, recordfunc, pats, opts)
212 216
213 217 def findpossible(cmd, table, strict=False):
214 218 """
215 219 Return cmd -> (aliases, command table entry)
216 220 for each matching command.
217 221 Return debug commands (or their aliases) only if no normal command matches.
218 222 """
219 223 choice = {}
220 224 debugchoice = {}
221 225
222 226 if cmd in table:
223 227 # short-circuit exact matches, "log" alias beats "^log|history"
224 228 keys = [cmd]
225 229 else:
226 230 keys = table.keys()
227 231
228 232 allcmds = []
229 233 for e in keys:
230 234 aliases = parsealiases(e)
231 235 allcmds.extend(aliases)
232 236 found = None
233 237 if cmd in aliases:
234 238 found = cmd
235 239 elif not strict:
236 240 for a in aliases:
237 241 if a.startswith(cmd):
238 242 found = a
239 243 break
240 244 if found is not None:
241 245 if aliases[0].startswith("debug") or found.startswith("debug"):
242 246 debugchoice[found] = (aliases, table[e])
243 247 else:
244 248 choice[found] = (aliases, table[e])
245 249
246 250 if not choice and debugchoice:
247 251 choice = debugchoice
248 252
249 253 return choice, allcmds
250 254
251 255 def findcmd(cmd, table, strict=True):
252 256 """Return (aliases, command table entry) for command string."""
253 257 choice, allcmds = findpossible(cmd, table, strict)
254 258
255 259 if cmd in choice:
256 260 return choice[cmd]
257 261
258 262 if len(choice) > 1:
259 263 clist = choice.keys()
260 264 clist.sort()
261 265 raise error.AmbiguousCommand(cmd, clist)
262 266
263 267 if choice:
264 268 return choice.values()[0]
265 269
266 270 raise error.UnknownCommand(cmd, allcmds)
267 271
268 272 def findrepo(p):
269 273 while not os.path.isdir(os.path.join(p, ".hg")):
270 274 oldp, p = p, os.path.dirname(p)
271 275 if p == oldp:
272 276 return None
273 277
274 278 return p
275 279
276 280 def bailifchanged(repo, merge=True):
277 281 if merge and repo.dirstate.p2() != nullid:
278 282 raise util.Abort(_('outstanding uncommitted merge'))
279 283 modified, added, removed, deleted = repo.status()[:4]
280 284 if modified or added or removed or deleted:
281 285 raise util.Abort(_('uncommitted changes'))
282 286 ctx = repo[None]
283 287 for s in sorted(ctx.substate):
284 288 ctx.sub(s).bailifchanged()
285 289
286 290 def logmessage(ui, opts):
287 291 """ get the log message according to -m and -l option """
288 292 message = opts.get('message')
289 293 logfile = opts.get('logfile')
290 294
291 295 if message and logfile:
292 296 raise util.Abort(_('options --message and --logfile are mutually '
293 297 'exclusive'))
294 298 if not message and logfile:
295 299 try:
296 300 if logfile == '-':
297 301 message = ui.fin.read()
298 302 else:
299 303 message = '\n'.join(util.readfile(logfile).splitlines())
300 304 except IOError, inst:
301 305 raise util.Abort(_("can't read commit message '%s': %s") %
302 306 (logfile, inst.strerror))
303 307 return message
304 308
305 309 def mergeeditform(ctxorbool, baseformname):
306 310 """return appropriate editform name (referencing a committemplate)
307 311
308 312 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
309 313 merging is committed.
310 314
311 315 This returns baseformname with '.merge' appended if it is a merge,
312 316 otherwise '.normal' is appended.
313 317 """
314 318 if isinstance(ctxorbool, bool):
315 319 if ctxorbool:
316 320 return baseformname + ".merge"
317 321 elif 1 < len(ctxorbool.parents()):
318 322 return baseformname + ".merge"
319 323
320 324 return baseformname + ".normal"
321 325
322 326 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
323 327 editform='', **opts):
324 328 """get appropriate commit message editor according to '--edit' option
325 329
326 330 'finishdesc' is a function to be called with edited commit message
327 331 (= 'description' of the new changeset) just after editing, but
328 332 before checking empty-ness. It should return actual text to be
329 333 stored into history. This allows to change description before
330 334 storing.
331 335
332 336 'extramsg' is a extra message to be shown in the editor instead of
333 337 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
334 338 is automatically added.
335 339
336 340 'editform' is a dot-separated list of names, to distinguish
337 341 the purpose of commit text editing.
338 342
339 343 'getcommiteditor' returns 'commitforceeditor' regardless of
340 344 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
341 345 they are specific for usage in MQ.
342 346 """
343 347 if edit or finishdesc or extramsg:
344 348 return lambda r, c, s: commitforceeditor(r, c, s,
345 349 finishdesc=finishdesc,
346 350 extramsg=extramsg,
347 351 editform=editform)
348 352 elif editform:
349 353 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
350 354 else:
351 355 return commiteditor
352 356
353 357 def loglimit(opts):
354 358 """get the log limit according to option -l/--limit"""
355 359 limit = opts.get('limit')
356 360 if limit:
357 361 try:
358 362 limit = int(limit)
359 363 except ValueError:
360 364 raise util.Abort(_('limit must be a positive integer'))
361 365 if limit <= 0:
362 366 raise util.Abort(_('limit must be positive'))
363 367 else:
364 368 limit = None
365 369 return limit
366 370
367 371 def makefilename(repo, pat, node, desc=None,
368 372 total=None, seqno=None, revwidth=None, pathname=None):
369 373 node_expander = {
370 374 'H': lambda: hex(node),
371 375 'R': lambda: str(repo.changelog.rev(node)),
372 376 'h': lambda: short(node),
373 377 'm': lambda: re.sub('[^\w]', '_', str(desc))
374 378 }
375 379 expander = {
376 380 '%': lambda: '%',
377 381 'b': lambda: os.path.basename(repo.root),
378 382 }
379 383
380 384 try:
381 385 if node:
382 386 expander.update(node_expander)
383 387 if node:
384 388 expander['r'] = (lambda:
385 389 str(repo.changelog.rev(node)).zfill(revwidth or 0))
386 390 if total is not None:
387 391 expander['N'] = lambda: str(total)
388 392 if seqno is not None:
389 393 expander['n'] = lambda: str(seqno)
390 394 if total is not None and seqno is not None:
391 395 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
392 396 if pathname is not None:
393 397 expander['s'] = lambda: os.path.basename(pathname)
394 398 expander['d'] = lambda: os.path.dirname(pathname) or '.'
395 399 expander['p'] = lambda: pathname
396 400
397 401 newname = []
398 402 patlen = len(pat)
399 403 i = 0
400 404 while i < patlen:
401 405 c = pat[i]
402 406 if c == '%':
403 407 i += 1
404 408 c = pat[i]
405 409 c = expander[c]()
406 410 newname.append(c)
407 411 i += 1
408 412 return ''.join(newname)
409 413 except KeyError, inst:
410 414 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
411 415 inst.args[0])
412 416
413 417 def makefileobj(repo, pat, node=None, desc=None, total=None,
414 418 seqno=None, revwidth=None, mode='wb', modemap=None,
415 419 pathname=None):
416 420
417 421 writable = mode not in ('r', 'rb')
418 422
419 423 if not pat or pat == '-':
420 424 if writable:
421 425 fp = repo.ui.fout
422 426 else:
423 427 fp = repo.ui.fin
424 428 if util.safehasattr(fp, 'fileno'):
425 429 return os.fdopen(os.dup(fp.fileno()), mode)
426 430 else:
427 431 # if this fp can't be duped properly, return
428 432 # a dummy object that can be closed
429 433 class wrappedfileobj(object):
430 434 noop = lambda x: None
431 435 def __init__(self, f):
432 436 self.f = f
433 437 def __getattr__(self, attr):
434 438 if attr == 'close':
435 439 return self.noop
436 440 else:
437 441 return getattr(self.f, attr)
438 442
439 443 return wrappedfileobj(fp)
440 444 if util.safehasattr(pat, 'write') and writable:
441 445 return pat
442 446 if util.safehasattr(pat, 'read') and 'r' in mode:
443 447 return pat
444 448 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
445 449 if modemap is not None:
446 450 mode = modemap.get(fn, mode)
447 451 if mode == 'wb':
448 452 modemap[fn] = 'ab'
449 453 return open(fn, mode)
450 454
451 455 def openrevlog(repo, cmd, file_, opts):
452 456 """opens the changelog, manifest, a filelog or a given revlog"""
453 457 cl = opts['changelog']
454 458 mf = opts['manifest']
455 459 dir = opts['dir']
456 460 msg = None
457 461 if cl and mf:
458 462 msg = _('cannot specify --changelog and --manifest at the same time')
459 463 elif cl and dir:
460 464 msg = _('cannot specify --changelog and --dir at the same time')
461 465 elif cl or mf:
462 466 if file_:
463 467 msg = _('cannot specify filename with --changelog or --manifest')
464 468 elif not repo:
465 469 msg = _('cannot specify --changelog or --manifest or --dir '
466 470 'without a repository')
467 471 if msg:
468 472 raise util.Abort(msg)
469 473
470 474 r = None
471 475 if repo:
472 476 if cl:
473 477 r = repo.unfiltered().changelog
474 478 elif dir:
475 479 if 'treemanifest' not in repo.requirements:
476 480 raise util.Abort(_("--dir can only be used on repos with "
477 481 "treemanifest enabled"))
478 482 dirlog = repo.dirlog(file_)
479 483 if len(dirlog):
480 484 r = dirlog
481 485 elif mf:
482 486 r = repo.manifest
483 487 elif file_:
484 488 filelog = repo.file(file_)
485 489 if len(filelog):
486 490 r = filelog
487 491 if not r:
488 492 if not file_:
489 493 raise error.CommandError(cmd, _('invalid arguments'))
490 494 if not os.path.isfile(file_):
491 495 raise util.Abort(_("revlog '%s' not found") % file_)
492 496 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
493 497 file_[:-2] + ".i")
494 498 return r
495 499
496 500 def copy(ui, repo, pats, opts, rename=False):
497 501 # called with the repo lock held
498 502 #
499 503 # hgsep => pathname that uses "/" to separate directories
500 504 # ossep => pathname that uses os.sep to separate directories
501 505 cwd = repo.getcwd()
502 506 targets = {}
503 507 after = opts.get("after")
504 508 dryrun = opts.get("dry_run")
505 509 wctx = repo[None]
506 510
507 511 def walkpat(pat):
508 512 srcs = []
509 513 if after:
510 514 badstates = '?'
511 515 else:
512 516 badstates = '?r'
513 517 m = scmutil.match(repo[None], [pat], opts, globbed=True)
514 518 for abs in repo.walk(m):
515 519 state = repo.dirstate[abs]
516 520 rel = m.rel(abs)
517 521 exact = m.exact(abs)
518 522 if state in badstates:
519 523 if exact and state == '?':
520 524 ui.warn(_('%s: not copying - file is not managed\n') % rel)
521 525 if exact and state == 'r':
522 526 ui.warn(_('%s: not copying - file has been marked for'
523 527 ' remove\n') % rel)
524 528 continue
525 529 # abs: hgsep
526 530 # rel: ossep
527 531 srcs.append((abs, rel, exact))
528 532 return srcs
529 533
530 534 # abssrc: hgsep
531 535 # relsrc: ossep
532 536 # otarget: ossep
533 537 def copyfile(abssrc, relsrc, otarget, exact):
534 538 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
535 539 if '/' in abstarget:
536 540 # We cannot normalize abstarget itself, this would prevent
537 541 # case only renames, like a => A.
538 542 abspath, absname = abstarget.rsplit('/', 1)
539 543 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
540 544 reltarget = repo.pathto(abstarget, cwd)
541 545 target = repo.wjoin(abstarget)
542 546 src = repo.wjoin(abssrc)
543 547 state = repo.dirstate[abstarget]
544 548
545 549 scmutil.checkportable(ui, abstarget)
546 550
547 551 # check for collisions
548 552 prevsrc = targets.get(abstarget)
549 553 if prevsrc is not None:
550 554 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
551 555 (reltarget, repo.pathto(abssrc, cwd),
552 556 repo.pathto(prevsrc, cwd)))
553 557 return
554 558
555 559 # check for overwrites
556 560 exists = os.path.lexists(target)
557 561 samefile = False
558 562 if exists and abssrc != abstarget:
559 563 if (repo.dirstate.normalize(abssrc) ==
560 564 repo.dirstate.normalize(abstarget)):
561 565 if not rename:
562 566 ui.warn(_("%s: can't copy - same file\n") % reltarget)
563 567 return
564 568 exists = False
565 569 samefile = True
566 570
567 571 if not after and exists or after and state in 'mn':
568 572 if not opts['force']:
569 573 ui.warn(_('%s: not overwriting - file exists\n') %
570 574 reltarget)
571 575 return
572 576
573 577 if after:
574 578 if not exists:
575 579 if rename:
576 580 ui.warn(_('%s: not recording move - %s does not exist\n') %
577 581 (relsrc, reltarget))
578 582 else:
579 583 ui.warn(_('%s: not recording copy - %s does not exist\n') %
580 584 (relsrc, reltarget))
581 585 return
582 586 elif not dryrun:
583 587 try:
584 588 if exists:
585 589 os.unlink(target)
586 590 targetdir = os.path.dirname(target) or '.'
587 591 if not os.path.isdir(targetdir):
588 592 os.makedirs(targetdir)
589 593 if samefile:
590 594 tmp = target + "~hgrename"
591 595 os.rename(src, tmp)
592 596 os.rename(tmp, target)
593 597 else:
594 598 util.copyfile(src, target)
595 599 srcexists = True
596 600 except IOError, inst:
597 601 if inst.errno == errno.ENOENT:
598 602 ui.warn(_('%s: deleted in working directory\n') % relsrc)
599 603 srcexists = False
600 604 else:
601 605 ui.warn(_('%s: cannot copy - %s\n') %
602 606 (relsrc, inst.strerror))
603 607 return True # report a failure
604 608
605 609 if ui.verbose or not exact:
606 610 if rename:
607 611 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
608 612 else:
609 613 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
610 614
611 615 targets[abstarget] = abssrc
612 616
613 617 # fix up dirstate
614 618 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
615 619 dryrun=dryrun, cwd=cwd)
616 620 if rename and not dryrun:
617 621 if not after and srcexists and not samefile:
618 622 util.unlinkpath(repo.wjoin(abssrc))
619 623 wctx.forget([abssrc])
620 624
621 625 # pat: ossep
622 626 # dest ossep
623 627 # srcs: list of (hgsep, hgsep, ossep, bool)
624 628 # return: function that takes hgsep and returns ossep
625 629 def targetpathfn(pat, dest, srcs):
626 630 if os.path.isdir(pat):
627 631 abspfx = pathutil.canonpath(repo.root, cwd, pat)
628 632 abspfx = util.localpath(abspfx)
629 633 if destdirexists:
630 634 striplen = len(os.path.split(abspfx)[0])
631 635 else:
632 636 striplen = len(abspfx)
633 637 if striplen:
634 638 striplen += len(os.sep)
635 639 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
636 640 elif destdirexists:
637 641 res = lambda p: os.path.join(dest,
638 642 os.path.basename(util.localpath(p)))
639 643 else:
640 644 res = lambda p: dest
641 645 return res
642 646
643 647 # pat: ossep
644 648 # dest ossep
645 649 # srcs: list of (hgsep, hgsep, ossep, bool)
646 650 # return: function that takes hgsep and returns ossep
647 651 def targetpathafterfn(pat, dest, srcs):
648 652 if matchmod.patkind(pat):
649 653 # a mercurial pattern
650 654 res = lambda p: os.path.join(dest,
651 655 os.path.basename(util.localpath(p)))
652 656 else:
653 657 abspfx = pathutil.canonpath(repo.root, cwd, pat)
654 658 if len(abspfx) < len(srcs[0][0]):
655 659 # A directory. Either the target path contains the last
656 660 # component of the source path or it does not.
657 661 def evalpath(striplen):
658 662 score = 0
659 663 for s in srcs:
660 664 t = os.path.join(dest, util.localpath(s[0])[striplen:])
661 665 if os.path.lexists(t):
662 666 score += 1
663 667 return score
664 668
665 669 abspfx = util.localpath(abspfx)
666 670 striplen = len(abspfx)
667 671 if striplen:
668 672 striplen += len(os.sep)
669 673 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
670 674 score = evalpath(striplen)
671 675 striplen1 = len(os.path.split(abspfx)[0])
672 676 if striplen1:
673 677 striplen1 += len(os.sep)
674 678 if evalpath(striplen1) > score:
675 679 striplen = striplen1
676 680 res = lambda p: os.path.join(dest,
677 681 util.localpath(p)[striplen:])
678 682 else:
679 683 # a file
680 684 if destdirexists:
681 685 res = lambda p: os.path.join(dest,
682 686 os.path.basename(util.localpath(p)))
683 687 else:
684 688 res = lambda p: dest
685 689 return res
686 690
687 691 pats = scmutil.expandpats(pats)
688 692 if not pats:
689 693 raise util.Abort(_('no source or destination specified'))
690 694 if len(pats) == 1:
691 695 raise util.Abort(_('no destination specified'))
692 696 dest = pats.pop()
693 697 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
694 698 if not destdirexists:
695 699 if len(pats) > 1 or matchmod.patkind(pats[0]):
696 700 raise util.Abort(_('with multiple sources, destination must be an '
697 701 'existing directory'))
698 702 if util.endswithsep(dest):
699 703 raise util.Abort(_('destination %s is not a directory') % dest)
700 704
701 705 tfn = targetpathfn
702 706 if after:
703 707 tfn = targetpathafterfn
704 708 copylist = []
705 709 for pat in pats:
706 710 srcs = walkpat(pat)
707 711 if not srcs:
708 712 continue
709 713 copylist.append((tfn(pat, dest, srcs), srcs))
710 714 if not copylist:
711 715 raise util.Abort(_('no files to copy'))
712 716
713 717 errors = 0
714 718 for targetpath, srcs in copylist:
715 719 for abssrc, relsrc, exact in srcs:
716 720 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
717 721 errors += 1
718 722
719 723 if errors:
720 724 ui.warn(_('(consider using --after)\n'))
721 725
722 726 return errors != 0
723 727
724 728 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
725 729 runargs=None, appendpid=False):
726 730 '''Run a command as a service.'''
727 731
728 732 def writepid(pid):
729 733 if opts['pid_file']:
730 734 if appendpid:
731 735 mode = 'a'
732 736 else:
733 737 mode = 'w'
734 738 fp = open(opts['pid_file'], mode)
735 739 fp.write(str(pid) + '\n')
736 740 fp.close()
737 741
738 742 if opts['daemon'] and not opts['daemon_pipefds']:
739 743 # Signal child process startup with file removal
740 744 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
741 745 os.close(lockfd)
742 746 try:
743 747 if not runargs:
744 748 runargs = util.hgcmd() + sys.argv[1:]
745 749 runargs.append('--daemon-pipefds=%s' % lockpath)
746 750 # Don't pass --cwd to the child process, because we've already
747 751 # changed directory.
748 752 for i in xrange(1, len(runargs)):
749 753 if runargs[i].startswith('--cwd='):
750 754 del runargs[i]
751 755 break
752 756 elif runargs[i].startswith('--cwd'):
753 757 del runargs[i:i + 2]
754 758 break
755 759 def condfn():
756 760 return not os.path.exists(lockpath)
757 761 pid = util.rundetached(runargs, condfn)
758 762 if pid < 0:
759 763 raise util.Abort(_('child process failed to start'))
760 764 writepid(pid)
761 765 finally:
762 766 try:
763 767 os.unlink(lockpath)
764 768 except OSError, e:
765 769 if e.errno != errno.ENOENT:
766 770 raise
767 771 if parentfn:
768 772 return parentfn(pid)
769 773 else:
770 774 return
771 775
772 776 if initfn:
773 777 initfn()
774 778
775 779 if not opts['daemon']:
776 780 writepid(os.getpid())
777 781
778 782 if opts['daemon_pipefds']:
779 783 lockpath = opts['daemon_pipefds']
780 784 try:
781 785 os.setsid()
782 786 except AttributeError:
783 787 pass
784 788 os.unlink(lockpath)
785 789 util.hidewindow()
786 790 sys.stdout.flush()
787 791 sys.stderr.flush()
788 792
789 793 nullfd = os.open(os.devnull, os.O_RDWR)
790 794 logfilefd = nullfd
791 795 if logfile:
792 796 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
793 797 os.dup2(nullfd, 0)
794 798 os.dup2(logfilefd, 1)
795 799 os.dup2(logfilefd, 2)
796 800 if nullfd not in (0, 1, 2):
797 801 os.close(nullfd)
798 802 if logfile and logfilefd not in (0, 1, 2):
799 803 os.close(logfilefd)
800 804
801 805 if runfn:
802 806 return runfn()
803 807
804 808 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
805 809 """Utility function used by commands.import to import a single patch
806 810
807 811 This function is explicitly defined here to help the evolve extension to
808 812 wrap this part of the import logic.
809 813
810 814 The API is currently a bit ugly because it a simple code translation from
811 815 the import command. Feel free to make it better.
812 816
813 817 :hunk: a patch (as a binary string)
814 818 :parents: nodes that will be parent of the created commit
815 819 :opts: the full dict of option passed to the import command
816 820 :msgs: list to save commit message to.
817 821 (used in case we need to save it when failing)
818 822 :updatefunc: a function that update a repo to a given node
819 823 updatefunc(<repo>, <node>)
820 824 """
821 825 tmpname, message, user, date, branch, nodeid, p1, p2 = \
822 826 patch.extract(ui, hunk)
823 827
824 828 update = not opts.get('bypass')
825 829 strip = opts["strip"]
826 830 prefix = opts["prefix"]
827 831 sim = float(opts.get('similarity') or 0)
828 832 if not tmpname:
829 833 return (None, None, False)
830 834 msg = _('applied to working directory')
831 835
832 836 rejects = False
833 837 dsguard = None
834 838
835 839 try:
836 840 cmdline_message = logmessage(ui, opts)
837 841 if cmdline_message:
838 842 # pickup the cmdline msg
839 843 message = cmdline_message
840 844 elif message:
841 845 # pickup the patch msg
842 846 message = message.strip()
843 847 else:
844 848 # launch the editor
845 849 message = None
846 850 ui.debug('message:\n%s\n' % message)
847 851
848 852 if len(parents) == 1:
849 853 parents.append(repo[nullid])
850 854 if opts.get('exact'):
851 855 if not nodeid or not p1:
852 856 raise util.Abort(_('not a Mercurial patch'))
853 857 p1 = repo[p1]
854 858 p2 = repo[p2 or nullid]
855 859 elif p2:
856 860 try:
857 861 p1 = repo[p1]
858 862 p2 = repo[p2]
859 863 # Without any options, consider p2 only if the
860 864 # patch is being applied on top of the recorded
861 865 # first parent.
862 866 if p1 != parents[0]:
863 867 p1 = parents[0]
864 868 p2 = repo[nullid]
865 869 except error.RepoError:
866 870 p1, p2 = parents
867 871 if p2.node() == nullid:
868 872 ui.warn(_("warning: import the patch as a normal revision\n"
869 873 "(use --exact to import the patch as a merge)\n"))
870 874 else:
871 875 p1, p2 = parents
872 876
873 877 n = None
874 878 if update:
875 879 dsguard = dirstateguard(repo, 'tryimportone')
876 880 if p1 != parents[0]:
877 881 updatefunc(repo, p1.node())
878 882 if p2 != parents[1]:
879 883 repo.setparents(p1.node(), p2.node())
880 884
881 885 if opts.get('exact') or opts.get('import_branch'):
882 886 repo.dirstate.setbranch(branch or 'default')
883 887
884 888 partial = opts.get('partial', False)
885 889 files = set()
886 890 try:
887 891 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
888 892 files=files, eolmode=None, similarity=sim / 100.0)
889 893 except patch.PatchError, e:
890 894 if not partial:
891 895 raise util.Abort(str(e))
892 896 if partial:
893 897 rejects = True
894 898
895 899 files = list(files)
896 900 if opts.get('no_commit'):
897 901 if message:
898 902 msgs.append(message)
899 903 else:
900 904 if opts.get('exact') or p2:
901 905 # If you got here, you either use --force and know what
902 906 # you are doing or used --exact or a merge patch while
903 907 # being updated to its first parent.
904 908 m = None
905 909 else:
906 910 m = scmutil.matchfiles(repo, files or [])
907 911 editform = mergeeditform(repo[None], 'import.normal')
908 912 if opts.get('exact'):
909 913 editor = None
910 914 else:
911 915 editor = getcommiteditor(editform=editform, **opts)
912 916 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
913 917 try:
914 918 if partial:
915 919 repo.ui.setconfig('ui', 'allowemptycommit', True)
916 920 n = repo.commit(message, opts.get('user') or user,
917 921 opts.get('date') or date, match=m,
918 922 editor=editor)
919 923 finally:
920 924 repo.ui.restoreconfig(allowemptyback)
921 925 dsguard.close()
922 926 else:
923 927 if opts.get('exact') or opts.get('import_branch'):
924 928 branch = branch or 'default'
925 929 else:
926 930 branch = p1.branch()
927 931 store = patch.filestore()
928 932 try:
929 933 files = set()
930 934 try:
931 935 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
932 936 files, eolmode=None)
933 937 except patch.PatchError, e:
934 938 raise util.Abort(str(e))
935 939 if opts.get('exact'):
936 940 editor = None
937 941 else:
938 942 editor = getcommiteditor(editform='import.bypass')
939 943 memctx = context.makememctx(repo, (p1.node(), p2.node()),
940 944 message,
941 945 opts.get('user') or user,
942 946 opts.get('date') or date,
943 947 branch, files, store,
944 948 editor=editor)
945 949 n = memctx.commit()
946 950 finally:
947 951 store.close()
948 952 if opts.get('exact') and opts.get('no_commit'):
949 953 # --exact with --no-commit is still useful in that it does merge
950 954 # and branch bits
951 955 ui.warn(_("warning: can't check exact import with --no-commit\n"))
952 956 elif opts.get('exact') and hex(n) != nodeid:
953 957 raise util.Abort(_('patch is damaged or loses information'))
954 958 if n:
955 959 # i18n: refers to a short changeset id
956 960 msg = _('created %s') % short(n)
957 961 return (msg, n, rejects)
958 962 finally:
959 963 lockmod.release(dsguard)
960 964 os.unlink(tmpname)
961 965
962 966 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
963 967 opts=None):
964 968 '''export changesets as hg patches.'''
965 969
966 970 total = len(revs)
967 971 revwidth = max([len(str(rev)) for rev in revs])
968 972 filemode = {}
969 973
970 974 def single(rev, seqno, fp):
971 975 ctx = repo[rev]
972 976 node = ctx.node()
973 977 parents = [p.node() for p in ctx.parents() if p]
974 978 branch = ctx.branch()
975 979 if switch_parent:
976 980 parents.reverse()
977 981
978 982 if parents:
979 983 prev = parents[0]
980 984 else:
981 985 prev = nullid
982 986
983 987 shouldclose = False
984 988 if not fp and len(template) > 0:
985 989 desc_lines = ctx.description().rstrip().split('\n')
986 990 desc = desc_lines[0] #Commit always has a first line.
987 991 fp = makefileobj(repo, template, node, desc=desc, total=total,
988 992 seqno=seqno, revwidth=revwidth, mode='wb',
989 993 modemap=filemode)
990 994 if fp != template:
991 995 shouldclose = True
992 996 if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
993 997 repo.ui.note("%s\n" % fp.name)
994 998
995 999 if not fp:
996 1000 write = repo.ui.write
997 1001 else:
998 1002 def write(s, **kw):
999 1003 fp.write(s)
1000 1004
1001 1005 write("# HG changeset patch\n")
1002 1006 write("# User %s\n" % ctx.user())
1003 1007 write("# Date %d %d\n" % ctx.date())
1004 1008 write("# %s\n" % util.datestr(ctx.date()))
1005 1009 if branch and branch != 'default':
1006 1010 write("# Branch %s\n" % branch)
1007 1011 write("# Node ID %s\n" % hex(node))
1008 1012 write("# Parent %s\n" % hex(prev))
1009 1013 if len(parents) > 1:
1010 1014 write("# Parent %s\n" % hex(parents[1]))
1011 1015 write(ctx.description().rstrip())
1012 1016 write("\n\n")
1013 1017
1014 1018 for chunk, label in patch.diffui(repo, prev, node, opts=opts):
1015 1019 write(chunk, label=label)
1016 1020
1017 1021 if shouldclose:
1018 1022 fp.close()
1019 1023
1020 1024 for seqno, rev in enumerate(revs):
1021 1025 single(rev, seqno + 1, fp)
1022 1026
1023 1027 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1024 1028 changes=None, stat=False, fp=None, prefix='',
1025 1029 root='', listsubrepos=False):
1026 1030 '''show diff or diffstat.'''
1027 1031 if fp is None:
1028 1032 write = ui.write
1029 1033 else:
1030 1034 def write(s, **kw):
1031 1035 fp.write(s)
1032 1036
1033 1037 if root:
1034 1038 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1035 1039 else:
1036 1040 relroot = ''
1037 1041 if relroot != '':
1038 1042 # XXX relative roots currently don't work if the root is within a
1039 1043 # subrepo
1040 1044 uirelroot = match.uipath(relroot)
1041 1045 relroot += '/'
1042 1046 for matchroot in match.files():
1043 1047 if not matchroot.startswith(relroot):
1044 1048 ui.warn(_('warning: %s not inside relative root %s\n') % (
1045 1049 match.uipath(matchroot), uirelroot))
1046 1050
1047 1051 if stat:
1048 1052 diffopts = diffopts.copy(context=0)
1049 1053 width = 80
1050 1054 if not ui.plain():
1051 1055 width = ui.termwidth()
1052 1056 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1053 1057 prefix=prefix, relroot=relroot)
1054 1058 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1055 1059 width=width,
1056 1060 git=diffopts.git):
1057 1061 write(chunk, label=label)
1058 1062 else:
1059 1063 for chunk, label in patch.diffui(repo, node1, node2, match,
1060 1064 changes, diffopts, prefix=prefix,
1061 1065 relroot=relroot):
1062 1066 write(chunk, label=label)
1063 1067
1064 1068 if listsubrepos:
1065 1069 ctx1 = repo[node1]
1066 1070 ctx2 = repo[node2]
1067 1071 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1068 1072 tempnode2 = node2
1069 1073 try:
1070 1074 if node2 is not None:
1071 1075 tempnode2 = ctx2.substate[subpath][1]
1072 1076 except KeyError:
1073 1077 # A subrepo that existed in node1 was deleted between node1 and
1074 1078 # node2 (inclusive). Thus, ctx2's substate won't contain that
1075 1079 # subpath. The best we can do is to ignore it.
1076 1080 tempnode2 = None
1077 1081 submatch = matchmod.narrowmatcher(subpath, match)
1078 1082 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1079 1083 stat=stat, fp=fp, prefix=prefix)
1080 1084
1081 1085 class changeset_printer(object):
1082 1086 '''show changeset information when templating not requested.'''
1083 1087
1084 1088 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1085 1089 self.ui = ui
1086 1090 self.repo = repo
1087 1091 self.buffered = buffered
1088 1092 self.matchfn = matchfn
1089 1093 self.diffopts = diffopts
1090 1094 self.header = {}
1091 1095 self.hunk = {}
1092 1096 self.lastheader = None
1093 1097 self.footer = None
1094 1098
1095 1099 def flush(self, rev):
1096 1100 if rev in self.header:
1097 1101 h = self.header[rev]
1098 1102 if h != self.lastheader:
1099 1103 self.lastheader = h
1100 1104 self.ui.write(h)
1101 1105 del self.header[rev]
1102 1106 if rev in self.hunk:
1103 1107 self.ui.write(self.hunk[rev])
1104 1108 del self.hunk[rev]
1105 1109 return 1
1106 1110 return 0
1107 1111
1108 1112 def close(self):
1109 1113 if self.footer:
1110 1114 self.ui.write(self.footer)
1111 1115
1112 1116 def show(self, ctx, copies=None, matchfn=None, **props):
1113 1117 if self.buffered:
1114 1118 self.ui.pushbuffer()
1115 1119 self._show(ctx, copies, matchfn, props)
1116 1120 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
1117 1121 else:
1118 1122 self._show(ctx, copies, matchfn, props)
1119 1123
1120 1124 def _show(self, ctx, copies, matchfn, props):
1121 1125 '''show a single changeset or file revision'''
1122 1126 changenode = ctx.node()
1123 1127 rev = ctx.rev()
1124 1128 if self.ui.debugflag:
1125 1129 hexfunc = hex
1126 1130 else:
1127 1131 hexfunc = short
1128 1132 if rev is None:
1129 1133 pctx = ctx.p1()
1130 1134 revnode = (pctx.rev(), hexfunc(pctx.node()) + '+')
1131 1135 else:
1132 1136 revnode = (rev, hexfunc(changenode))
1133 1137
1134 1138 if self.ui.quiet:
1135 1139 self.ui.write("%d:%s\n" % revnode, label='log.node')
1136 1140 return
1137 1141
1138 1142 date = util.datestr(ctx.date())
1139 1143
1140 1144 # i18n: column positioning for "hg log"
1141 1145 self.ui.write(_("changeset: %d:%s\n") % revnode,
1142 1146 label='log.changeset changeset.%s' % ctx.phasestr())
1143 1147
1144 1148 # branches are shown first before any other names due to backwards
1145 1149 # compatibility
1146 1150 branch = ctx.branch()
1147 1151 # don't show the default branch name
1148 1152 if branch != 'default':
1149 1153 # i18n: column positioning for "hg log"
1150 1154 self.ui.write(_("branch: %s\n") % branch,
1151 1155 label='log.branch')
1152 1156
1153 1157 for name, ns in self.repo.names.iteritems():
1154 1158 # branches has special logic already handled above, so here we just
1155 1159 # skip it
1156 1160 if name == 'branches':
1157 1161 continue
1158 1162 # we will use the templatename as the color name since those two
1159 1163 # should be the same
1160 1164 for name in ns.names(self.repo, changenode):
1161 1165 self.ui.write(ns.logfmt % name,
1162 1166 label='log.%s' % ns.colorname)
1163 1167 if self.ui.debugflag:
1164 1168 # i18n: column positioning for "hg log"
1165 1169 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1166 1170 label='log.phase')
1167 1171 for pctx in self._meaningful_parentrevs(ctx):
1168 1172 label = 'log.parent changeset.%s' % pctx.phasestr()
1169 1173 # i18n: column positioning for "hg log"
1170 1174 self.ui.write(_("parent: %d:%s\n")
1171 1175 % (pctx.rev(), hexfunc(pctx.node())),
1172 1176 label=label)
1173 1177
1174 1178 if self.ui.debugflag and rev is not None:
1175 1179 mnode = ctx.manifestnode()
1176 1180 # i18n: column positioning for "hg log"
1177 1181 self.ui.write(_("manifest: %d:%s\n") %
1178 1182 (self.repo.manifest.rev(mnode), hex(mnode)),
1179 1183 label='ui.debug log.manifest')
1180 1184 # i18n: column positioning for "hg log"
1181 1185 self.ui.write(_("user: %s\n") % ctx.user(),
1182 1186 label='log.user')
1183 1187 # i18n: column positioning for "hg log"
1184 1188 self.ui.write(_("date: %s\n") % date,
1185 1189 label='log.date')
1186 1190
1187 1191 if self.ui.debugflag:
1188 1192 files = ctx.p1().status(ctx)[:3]
1189 1193 for key, value in zip([# i18n: column positioning for "hg log"
1190 1194 _("files:"),
1191 1195 # i18n: column positioning for "hg log"
1192 1196 _("files+:"),
1193 1197 # i18n: column positioning for "hg log"
1194 1198 _("files-:")], files):
1195 1199 if value:
1196 1200 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1197 1201 label='ui.debug log.files')
1198 1202 elif ctx.files() and self.ui.verbose:
1199 1203 # i18n: column positioning for "hg log"
1200 1204 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1201 1205 label='ui.note log.files')
1202 1206 if copies and self.ui.verbose:
1203 1207 copies = ['%s (%s)' % c for c in copies]
1204 1208 # i18n: column positioning for "hg log"
1205 1209 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1206 1210 label='ui.note log.copies')
1207 1211
1208 1212 extra = ctx.extra()
1209 1213 if extra and self.ui.debugflag:
1210 1214 for key, value in sorted(extra.items()):
1211 1215 # i18n: column positioning for "hg log"
1212 1216 self.ui.write(_("extra: %s=%s\n")
1213 1217 % (key, value.encode('string_escape')),
1214 1218 label='ui.debug log.extra')
1215 1219
1216 1220 description = ctx.description().strip()
1217 1221 if description:
1218 1222 if self.ui.verbose:
1219 1223 self.ui.write(_("description:\n"),
1220 1224 label='ui.note log.description')
1221 1225 self.ui.write(description,
1222 1226 label='ui.note log.description')
1223 1227 self.ui.write("\n\n")
1224 1228 else:
1225 1229 # i18n: column positioning for "hg log"
1226 1230 self.ui.write(_("summary: %s\n") %
1227 1231 description.splitlines()[0],
1228 1232 label='log.summary')
1229 1233 self.ui.write("\n")
1230 1234
1231 1235 self.showpatch(changenode, matchfn)
1232 1236
1233 1237 def showpatch(self, node, matchfn):
1234 1238 if not matchfn:
1235 1239 matchfn = self.matchfn
1236 1240 if matchfn:
1237 1241 stat = self.diffopts.get('stat')
1238 1242 diff = self.diffopts.get('patch')
1239 1243 diffopts = patch.diffallopts(self.ui, self.diffopts)
1240 1244 prev = self.repo.changelog.parents(node)[0]
1241 1245 if stat:
1242 1246 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1243 1247 match=matchfn, stat=True)
1244 1248 if diff:
1245 1249 if stat:
1246 1250 self.ui.write("\n")
1247 1251 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1248 1252 match=matchfn, stat=False)
1249 1253 self.ui.write("\n")
1250 1254
1251 1255 def _meaningful_parentrevs(self, ctx):
1252 1256 """Return list of meaningful (or all if debug) parentrevs for rev.
1253 1257
1254 1258 For merges (two non-nullrev revisions) both parents are meaningful.
1255 1259 Otherwise the first parent revision is considered meaningful if it
1256 1260 is not the preceding revision.
1257 1261 """
1258 1262 parents = ctx.parents()
1259 1263 if len(parents) > 1:
1260 1264 return parents
1261 1265 if self.ui.debugflag:
1262 1266 return [parents[0], self.repo['null']]
1263 1267 if parents[0].rev() >= scmutil.intrev(self.repo, ctx.rev()) - 1:
1264 1268 return []
1265 1269 return parents
1266 1270
1267 1271 class jsonchangeset(changeset_printer):
1268 1272 '''format changeset information.'''
1269 1273
1270 1274 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1271 1275 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1272 1276 self.cache = {}
1273 1277 self._first = True
1274 1278
1275 1279 def close(self):
1276 1280 if not self._first:
1277 1281 self.ui.write("\n]\n")
1278 1282 else:
1279 1283 self.ui.write("[]\n")
1280 1284
1281 1285 def _show(self, ctx, copies, matchfn, props):
1282 1286 '''show a single changeset or file revision'''
1283 1287 rev = ctx.rev()
1284 1288 if rev is None:
1285 1289 jrev = jnode = 'null'
1286 1290 else:
1287 1291 jrev = str(rev)
1288 1292 jnode = '"%s"' % hex(ctx.node())
1289 1293 j = encoding.jsonescape
1290 1294
1291 1295 if self._first:
1292 1296 self.ui.write("[\n {")
1293 1297 self._first = False
1294 1298 else:
1295 1299 self.ui.write(",\n {")
1296 1300
1297 1301 if self.ui.quiet:
1298 1302 self.ui.write('\n "rev": %s' % jrev)
1299 1303 self.ui.write(',\n "node": %s' % jnode)
1300 1304 self.ui.write('\n }')
1301 1305 return
1302 1306
1303 1307 self.ui.write('\n "rev": %s' % jrev)
1304 1308 self.ui.write(',\n "node": %s' % jnode)
1305 1309 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1306 1310 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1307 1311 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1308 1312 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1309 1313 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1310 1314
1311 1315 self.ui.write(',\n "bookmarks": [%s]' %
1312 1316 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1313 1317 self.ui.write(',\n "tags": [%s]' %
1314 1318 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1315 1319 self.ui.write(',\n "parents": [%s]' %
1316 1320 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1317 1321
1318 1322 if self.ui.debugflag:
1319 1323 if rev is None:
1320 1324 jmanifestnode = 'null'
1321 1325 else:
1322 1326 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1323 1327 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1324 1328
1325 1329 self.ui.write(',\n "extra": {%s}' %
1326 1330 ", ".join('"%s": "%s"' % (j(k), j(v))
1327 1331 for k, v in ctx.extra().items()))
1328 1332
1329 1333 files = ctx.p1().status(ctx)
1330 1334 self.ui.write(',\n "modified": [%s]' %
1331 1335 ", ".join('"%s"' % j(f) for f in files[0]))
1332 1336 self.ui.write(',\n "added": [%s]' %
1333 1337 ", ".join('"%s"' % j(f) for f in files[1]))
1334 1338 self.ui.write(',\n "removed": [%s]' %
1335 1339 ", ".join('"%s"' % j(f) for f in files[2]))
1336 1340
1337 1341 elif self.ui.verbose:
1338 1342 self.ui.write(',\n "files": [%s]' %
1339 1343 ", ".join('"%s"' % j(f) for f in ctx.files()))
1340 1344
1341 1345 if copies:
1342 1346 self.ui.write(',\n "copies": {%s}' %
1343 1347 ", ".join('"%s": "%s"' % (j(k), j(v))
1344 1348 for k, v in copies))
1345 1349
1346 1350 matchfn = self.matchfn
1347 1351 if matchfn:
1348 1352 stat = self.diffopts.get('stat')
1349 1353 diff = self.diffopts.get('patch')
1350 1354 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1351 1355 node, prev = ctx.node(), ctx.p1().node()
1352 1356 if stat:
1353 1357 self.ui.pushbuffer()
1354 1358 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1355 1359 match=matchfn, stat=True)
1356 1360 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1357 1361 if diff:
1358 1362 self.ui.pushbuffer()
1359 1363 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1360 1364 match=matchfn, stat=False)
1361 1365 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1362 1366
1363 1367 self.ui.write("\n }")
1364 1368
1365 1369 class changeset_templater(changeset_printer):
1366 1370 '''format changeset information.'''
1367 1371
1368 1372 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1369 1373 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1370 1374 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1371 1375 defaulttempl = {
1372 1376 'parent': '{rev}:{node|formatnode} ',
1373 1377 'manifest': '{rev}:{node|formatnode}',
1374 1378 'file_copy': '{name} ({source})',
1375 1379 'extra': '{key}={value|stringescape}'
1376 1380 }
1377 1381 # filecopy is preserved for compatibility reasons
1378 1382 defaulttempl['filecopy'] = defaulttempl['file_copy']
1379 1383 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1380 1384 cache=defaulttempl)
1381 1385 if tmpl:
1382 1386 self.t.cache['changeset'] = tmpl
1383 1387
1384 1388 self.cache = {}
1385 1389
1386 1390 def _show(self, ctx, copies, matchfn, props):
1387 1391 '''show a single changeset or file revision'''
1388 1392
1389 1393 showlist = templatekw.showlist
1390 1394
1391 1395 # showparents() behaviour depends on ui trace level which
1392 1396 # causes unexpected behaviours at templating level and makes
1393 1397 # it harder to extract it in a standalone function. Its
1394 1398 # behaviour cannot be changed so leave it here for now.
1395 1399 def showparents(**args):
1396 1400 ctx = args['ctx']
1397 1401 parents = [[('rev', p.rev()),
1398 1402 ('node', p.hex()),
1399 1403 ('phase', p.phasestr())]
1400 1404 for p in self._meaningful_parentrevs(ctx)]
1401 1405 return showlist('parent', parents, **args)
1402 1406
1403 1407 props = props.copy()
1404 1408 props.update(templatekw.keywords)
1405 1409 props['parents'] = showparents
1406 1410 props['templ'] = self.t
1407 1411 props['ctx'] = ctx
1408 1412 props['repo'] = self.repo
1409 1413 props['revcache'] = {'copies': copies}
1410 1414 props['cache'] = self.cache
1411 1415
1412 1416 # find correct templates for current mode
1413 1417
1414 1418 tmplmodes = [
1415 1419 (True, None),
1416 1420 (self.ui.verbose, 'verbose'),
1417 1421 (self.ui.quiet, 'quiet'),
1418 1422 (self.ui.debugflag, 'debug'),
1419 1423 ]
1420 1424
1421 1425 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
1422 1426 for mode, postfix in tmplmodes:
1423 1427 for type in types:
1424 1428 cur = postfix and ('%s_%s' % (type, postfix)) or type
1425 1429 if mode and cur in self.t:
1426 1430 types[type] = cur
1427 1431
1428 1432 try:
1429 1433
1430 1434 # write header
1431 1435 if types['header']:
1432 1436 h = templater.stringify(self.t(types['header'], **props))
1433 1437 if self.buffered:
1434 1438 self.header[ctx.rev()] = h
1435 1439 else:
1436 1440 if self.lastheader != h:
1437 1441 self.lastheader = h
1438 1442 self.ui.write(h)
1439 1443
1440 1444 # write changeset metadata, then patch if requested
1441 1445 key = types['changeset']
1442 1446 self.ui.write(templater.stringify(self.t(key, **props)))
1443 1447 self.showpatch(ctx.node(), matchfn)
1444 1448
1445 1449 if types['footer']:
1446 1450 if not self.footer:
1447 1451 self.footer = templater.stringify(self.t(types['footer'],
1448 1452 **props))
1449 1453
1450 1454 except KeyError, inst:
1451 1455 msg = _("%s: no key named '%s'")
1452 1456 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1453 1457 except SyntaxError, inst:
1454 1458 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1455 1459
1456 1460 def gettemplate(ui, tmpl, style):
1457 1461 """
1458 1462 Find the template matching the given template spec or style.
1459 1463 """
1460 1464
1461 1465 # ui settings
1462 1466 if not tmpl and not style: # template are stronger than style
1463 1467 tmpl = ui.config('ui', 'logtemplate')
1464 1468 if tmpl:
1465 1469 try:
1466 1470 tmpl = templater.unquotestring(tmpl)
1467 1471 except SyntaxError:
1468 1472 pass
1469 1473 return tmpl, None
1470 1474 else:
1471 1475 style = util.expandpath(ui.config('ui', 'style', ''))
1472 1476
1473 1477 if not tmpl and style:
1474 1478 mapfile = style
1475 1479 if not os.path.split(mapfile)[0]:
1476 1480 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1477 1481 or templater.templatepath(mapfile))
1478 1482 if mapname:
1479 1483 mapfile = mapname
1480 1484 return None, mapfile
1481 1485
1482 1486 if not tmpl:
1483 1487 return None, None
1484 1488
1485 1489 # looks like a literal template?
1486 1490 if '{' in tmpl:
1487 1491 return tmpl, None
1488 1492
1489 1493 # perhaps a stock style?
1490 1494 if not os.path.split(tmpl)[0]:
1491 1495 mapname = (templater.templatepath('map-cmdline.' + tmpl)
1492 1496 or templater.templatepath(tmpl))
1493 1497 if mapname and os.path.isfile(mapname):
1494 1498 return None, mapname
1495 1499
1496 1500 # perhaps it's a reference to [templates]
1497 1501 t = ui.config('templates', tmpl)
1498 1502 if t:
1499 1503 try:
1500 1504 tmpl = templater.unquotestring(t)
1501 1505 except SyntaxError:
1502 1506 tmpl = t
1503 1507 return tmpl, None
1504 1508
1505 1509 if tmpl == 'list':
1506 1510 ui.write(_("available styles: %s\n") % templater.stylelist())
1507 1511 raise util.Abort(_("specify a template"))
1508 1512
1509 1513 # perhaps it's a path to a map or a template
1510 1514 if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
1511 1515 # is it a mapfile for a style?
1512 1516 if os.path.basename(tmpl).startswith("map-"):
1513 1517 return None, os.path.realpath(tmpl)
1514 1518 tmpl = open(tmpl).read()
1515 1519 return tmpl, None
1516 1520
1517 1521 # constant string?
1518 1522 return tmpl, None
1519 1523
1520 1524 def show_changeset(ui, repo, opts, buffered=False):
1521 1525 """show one changeset using template or regular display.
1522 1526
1523 1527 Display format will be the first non-empty hit of:
1524 1528 1. option 'template'
1525 1529 2. option 'style'
1526 1530 3. [ui] setting 'logtemplate'
1527 1531 4. [ui] setting 'style'
1528 1532 If all of these values are either the unset or the empty string,
1529 1533 regular display via changeset_printer() is done.
1530 1534 """
1531 1535 # options
1532 1536 matchfn = None
1533 1537 if opts.get('patch') or opts.get('stat'):
1534 1538 matchfn = scmutil.matchall(repo)
1535 1539
1536 1540 if opts.get('template') == 'json':
1537 1541 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1538 1542
1539 1543 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1540 1544
1541 1545 if not tmpl and not mapfile:
1542 1546 return changeset_printer(ui, repo, matchfn, opts, buffered)
1543 1547
1544 1548 try:
1545 1549 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1546 1550 buffered)
1547 1551 except SyntaxError, inst:
1548 1552 raise util.Abort(inst.args[0])
1549 1553 return t
1550 1554
1551 1555 def showmarker(ui, marker):
1552 1556 """utility function to display obsolescence marker in a readable way
1553 1557
1554 1558 To be used by debug function."""
1555 1559 ui.write(hex(marker.precnode()))
1556 1560 for repl in marker.succnodes():
1557 1561 ui.write(' ')
1558 1562 ui.write(hex(repl))
1559 1563 ui.write(' %X ' % marker.flags())
1560 1564 parents = marker.parentnodes()
1561 1565 if parents is not None:
1562 1566 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1563 1567 ui.write('(%s) ' % util.datestr(marker.date()))
1564 1568 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1565 1569 sorted(marker.metadata().items())
1566 1570 if t[0] != 'date')))
1567 1571 ui.write('\n')
1568 1572
1569 1573 def finddate(ui, repo, date):
1570 1574 """Find the tipmost changeset that matches the given date spec"""
1571 1575
1572 1576 df = util.matchdate(date)
1573 1577 m = scmutil.matchall(repo)
1574 1578 results = {}
1575 1579
1576 1580 def prep(ctx, fns):
1577 1581 d = ctx.date()
1578 1582 if df(d[0]):
1579 1583 results[ctx.rev()] = d
1580 1584
1581 1585 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1582 1586 rev = ctx.rev()
1583 1587 if rev in results:
1584 1588 ui.status(_("found revision %s from %s\n") %
1585 1589 (rev, util.datestr(results[rev])))
1586 1590 return str(rev)
1587 1591
1588 1592 raise util.Abort(_("revision matching date not found"))
1589 1593
1590 1594 def increasingwindows(windowsize=8, sizelimit=512):
1591 1595 while True:
1592 1596 yield windowsize
1593 1597 if windowsize < sizelimit:
1594 1598 windowsize *= 2
1595 1599
1596 1600 class FileWalkError(Exception):
1597 1601 pass
1598 1602
1599 1603 def walkfilerevs(repo, match, follow, revs, fncache):
1600 1604 '''Walks the file history for the matched files.
1601 1605
1602 1606 Returns the changeset revs that are involved in the file history.
1603 1607
1604 1608 Throws FileWalkError if the file history can't be walked using
1605 1609 filelogs alone.
1606 1610 '''
1607 1611 wanted = set()
1608 1612 copies = []
1609 1613 minrev, maxrev = min(revs), max(revs)
1610 1614 def filerevgen(filelog, last):
1611 1615 """
1612 1616 Only files, no patterns. Check the history of each file.
1613 1617
1614 1618 Examines filelog entries within the minrev..maxrev linkrev range
1615 1619 and returns an iterator yielding (linkrev, parentlinkrevs, copied)
1616 1620 tuples in backwards order.
1617 1621 """
1618 1622 cl_count = len(repo)
1619 1623 revs = []
1620 1624 for j in xrange(0, last + 1):
1621 1625 linkrev = filelog.linkrev(j)
1622 1626 if linkrev < minrev:
1623 1627 continue
1624 1628 # only yield revs for which we have the changelog; missing ones can
1625 1629 # happen while doing "hg log" during a pull or commit
1626 1630 if linkrev >= cl_count:
1627 1631 break
1628 1632
1629 1633 parentlinkrevs = []
1630 1634 for p in filelog.parentrevs(j):
1631 1635 if p != nullrev:
1632 1636 parentlinkrevs.append(filelog.linkrev(p))
1633 1637 n = filelog.node(j)
1634 1638 revs.append((linkrev, parentlinkrevs,
1635 1639 follow and filelog.renamed(n)))
1636 1640
1637 1641 return reversed(revs)
1638 1642 def iterfiles():
1639 1643 pctx = repo['.']
1640 1644 for filename in match.files():
1641 1645 if follow:
1642 1646 if filename not in pctx:
1643 1647 raise util.Abort(_('cannot follow file not in parent '
1644 1648 'revision: "%s"') % filename)
1645 1649 yield filename, pctx[filename].filenode()
1646 1650 else:
1647 1651 yield filename, None
1648 1652 for filename_node in copies:
1649 1653 yield filename_node
1650 1654
1651 1655 for file_, node in iterfiles():
1652 1656 filelog = repo.file(file_)
1653 1657 if not len(filelog):
1654 1658 if node is None:
1655 1659 # A zero count may be a directory or deleted file, so
1656 1660 # try to find matching entries on the slow path.
1657 1661 if follow:
1658 1662 raise util.Abort(
1659 1663 _('cannot follow nonexistent file: "%s"') % file_)
1660 1664 raise FileWalkError("Cannot walk via filelog")
1661 1665 else:
1662 1666 continue
1663 1667
1664 1668 if node is None:
1665 1669 last = len(filelog) - 1
1666 1670 else:
1667 1671 last = filelog.rev(node)
1668 1672
1669 1673 # keep track of all ancestors of the file
1670 1674 ancestors = set([filelog.linkrev(last)])
1671 1675
1672 1676 # iterate from latest to oldest revision
1673 1677 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1674 1678 if not follow:
1675 1679 if rev > maxrev:
1676 1680 continue
1677 1681 else:
1678 1682 # Note that last might not be the first interesting
1679 1683 # rev to us:
1680 1684 # if the file has been changed after maxrev, we'll
1681 1685 # have linkrev(last) > maxrev, and we still need
1682 1686 # to explore the file graph
1683 1687 if rev not in ancestors:
1684 1688 continue
1685 1689 # XXX insert 1327 fix here
1686 1690 if flparentlinkrevs:
1687 1691 ancestors.update(flparentlinkrevs)
1688 1692
1689 1693 fncache.setdefault(rev, []).append(file_)
1690 1694 wanted.add(rev)
1691 1695 if copied:
1692 1696 copies.append(copied)
1693 1697
1694 1698 return wanted
1695 1699
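# _followfilter.match() keeps only revisions related to the first revision it
# is fed: iterated forward it follows descendants, iterated backwards it
# follows ancestors (first parents only when onlyfirst is set).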
1696 1700 class _followfilter(object):
1697 1701 def __init__(self, repo, onlyfirst=False):
1698 1702 self.repo = repo
1699 1703 self.startrev = nullrev
1700 1704 self.roots = set()
1701 1705 self.onlyfirst = onlyfirst
1702 1706
1703 1707 def match(self, rev):
1704 1708 def realparents(rev):
1705 1709 if self.onlyfirst:
1706 1710 return self.repo.changelog.parentrevs(rev)[0:1]
1707 1711 else:
1708 1712 return filter(lambda x: x != nullrev,
1709 1713 self.repo.changelog.parentrevs(rev))
1710 1714
1711 1715 if self.startrev == nullrev:
1712 1716 self.startrev = rev
1713 1717 return True
1714 1718
1715 1719 if rev > self.startrev:
1716 1720 # forward: all descendants
1717 1721 if not self.roots:
1718 1722 self.roots.add(self.startrev)
1719 1723 for parent in realparents(rev):
1720 1724 if parent in self.roots:
1721 1725 self.roots.add(rev)
1722 1726 return True
1723 1727 else:
1724 1728 # backwards: all parents
1725 1729 if not self.roots:
1726 1730 self.roots.update(realparents(self.startrev))
1727 1731 if rev in self.roots:
1728 1732 self.roots.remove(rev)
1729 1733 self.roots.update(realparents(rev))
1730 1734 return True
1731 1735
1732 1736 return False
1733 1737
1734 1738 def walkchangerevs(repo, match, opts, prepare):
1735 1739 '''Iterate over files and the revs in which they changed.
1736 1740
1737 1741 Callers most commonly need to iterate backwards over the history
1738 1742 in which they are interested. Doing so has awful (quadratic-looking)
1739 1743 performance, so we use iterators in a "windowed" way.
1740 1744
1741 1745 We walk a window of revisions in the desired order. Within the
1742 1746 window, we first walk forwards to gather data, then in the desired
1743 1747 order (usually backwards) to display it.
1744 1748
1745 1749 This function returns an iterator yielding contexts. Before
1746 1750 yielding each context, the iterator will first call the prepare
1747 1751 function on each context in the window in forward order.'''
1748 1752
1749 1753 follow = opts.get('follow') or opts.get('follow_first')
1750 1754 revs = _logrevs(repo, opts)
1751 1755 if not revs:
1752 1756 return []
1753 1757 wanted = set()
1754 1758 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1755 1759 fncache = {}
1756 1760 change = repo.changectx
1757 1761
1758 1762 # First step is to fill wanted, the set of revisions that we want to yield.
1759 1763 # When it does not induce extra cost, we also fill fncache for revisions in
1760 1764 # wanted: a cache of filenames that were changed (ctx.files()) and that
1761 1765 # match the file filtering conditions.
1762 1766
1763 1767 if match.always():
1764 1768 # No files, no patterns. Display all revs.
1765 1769 wanted = revs
1766 1770
1767 1771 if not slowpath and match.files():
1768 1772 # We only have to read through the filelog to find wanted revisions
1769 1773
1770 1774 try:
1771 1775 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1772 1776 except FileWalkError:
1773 1777 slowpath = True
1774 1778
1775 1779 # We decided to fall back to the slowpath because at least one
1776 1780 # of the paths was not a file. Check to see if at least one of them
1777 1781 # existed in history, otherwise simply return
1778 1782 for path in match.files():
1779 1783 if path == '.' or path in repo.store:
1780 1784 break
1781 1785 else:
1782 1786 return []
1783 1787
1784 1788 if slowpath:
1785 1789 # We have to read the changelog to match filenames against
1786 1790 # changed files
1787 1791
1788 1792 if follow:
1789 1793 raise util.Abort(_('can only follow copies/renames for explicit '
1790 1794 'filenames'))
1791 1795
1792 1796 # The slow path checks files modified in every changeset.
1793 1797 # This is really slow on large repos, so compute the set lazily.
1794 1798 class lazywantedset(object):
1795 1799 def __init__(self):
1796 1800 self.set = set()
1797 1801 self.revs = set(revs)
1798 1802
1799 1803 # No need to worry about locality here because it will be accessed
1800 1804 # in the same order as the increasing window below.
1801 1805 def __contains__(self, value):
1802 1806 if value in self.set:
1803 1807 return True
1804 1808 elif not value in self.revs:
1805 1809 return False
1806 1810 else:
1807 1811 self.revs.discard(value)
1808 1812 ctx = change(value)
1809 1813 matches = filter(match, ctx.files())
1810 1814 if matches:
1811 1815 fncache[value] = matches
1812 1816 self.set.add(value)
1813 1817 return True
1814 1818 return False
1815 1819
1816 1820 def discard(self, value):
1817 1821 self.revs.discard(value)
1818 1822 self.set.discard(value)
1819 1823
1820 1824 wanted = lazywantedset()
1821 1825
1822 1826 # it might be worthwhile to do this in the iterator if the rev range
1823 1827 # is descending and the prune args are all within that range
1824 1828 for rev in opts.get('prune', ()):
1825 1829 rev = repo[rev].rev()
1826 1830 ff = _followfilter(repo)
1827 1831 stop = min(revs[0], revs[-1])
1828 1832 for x in xrange(rev, stop - 1, -1):
1829 1833 if ff.match(x):
1830 1834 wanted = wanted - [x]
1831 1835
1832 1836 # Now that wanted is correctly initialized, we can iterate over the
1833 1837 # revision range, yielding only revisions in wanted.
1834 1838 def iterate():
1835 1839 if follow and not match.files():
1836 1840 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1837 1841 def want(rev):
1838 1842 return ff.match(rev) and rev in wanted
1839 1843 else:
1840 1844 def want(rev):
1841 1845 return rev in wanted
1842 1846
1843 1847 it = iter(revs)
1844 1848 stopiteration = False
1845 1849 for windowsize in increasingwindows():
1846 1850 nrevs = []
1847 1851 for i in xrange(windowsize):
1848 1852 rev = next(it, None)
1849 1853 if rev is None:
1850 1854 stopiteration = True
1851 1855 break
1852 1856 elif want(rev):
1853 1857 nrevs.append(rev)
1854 1858 for rev in sorted(nrevs):
1855 1859 fns = fncache.get(rev)
1856 1860 ctx = change(rev)
1857 1861 if not fns:
1858 1862 def fns_generator():
1859 1863 for f in ctx.files():
1860 1864 if match(f):
1861 1865 yield f
1862 1866 fns = fns_generator()
1863 1867 prepare(ctx, fns)
1864 1868 for rev in nrevs:
1865 1869 yield change(rev)
1866 1870
1867 1871 if stopiteration:
1868 1872 break
1869 1873
1870 1874 return iterate()
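
# Minimal caller sketch (illustrative; finddate() above drives this API the
# same way -- the "prep" callback and the output line are placeholders):
#
#   m = scmutil.matchall(repo)
#   def prep(ctx, fns):
#       pass  # gather per-revision data here; fns is the matched files
#   for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
#       ui.status("%d:%s\n" % (ctx.rev(), ctx))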
1871 1875
1872 1876 def _makefollowlogfilematcher(repo, files, followfirst):
1873 1877 # When displaying a revision with --patch --follow FILE, we have
1874 1878 # to know which file of the revision must be diffed. With
1875 1879 # --follow, we want the names of the ancestors of FILE in the
1876 1880 # revision, stored in "fcache". "fcache" is populated by
1877 1881 # reproducing the graph traversal already done by --follow revset
1878 1882 # and relating linkrevs to file names (which is not "correct" but
1879 1883 # good enough).
1880 1884 fcache = {}
1881 1885 fcacheready = [False]
1882 1886 pctx = repo['.']
1883 1887
1884 1888 def populate():
1885 1889 for fn in files:
1886 1890 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1887 1891 for c in i:
1888 1892 fcache.setdefault(c.linkrev(), set()).add(c.path())
1889 1893
1890 1894 def filematcher(rev):
1891 1895 if not fcacheready[0]:
1892 1896 # Lazy initialization
1893 1897 fcacheready[0] = True
1894 1898 populate()
1895 1899 return scmutil.matchfiles(repo, fcache.get(rev, []))
1896 1900
1897 1901 return filematcher
1898 1902
1899 1903 def _makenofollowlogfilematcher(repo, pats, opts):
1900 1904 '''hook for extensions to override the filematcher for non-follow cases'''
1901 1905 return None
1902 1906
1903 1907 def _makelogrevset(repo, pats, opts, revs):
1904 1908 """Return (expr, filematcher) where expr is a revset string built
1905 1909 from log options and file patterns or None. If --stat or --patch
1906 1910 are not passed filematcher is None. Otherwise it is a callable
1907 1911 taking a revision number and returning a match object filtering
1908 1912 the files to be detailed when displaying the revision.
1909 1913 """
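# Illustrative example (assumed, not exhaustive): "hg log -u alice -k bug"
# with no file patterns produces an expression roughly like
#     ((keyword('bug')) and (user('alice')))
# assembled from the opt2revset table below and joined with ' and '.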
1910 1914 opt2revset = {
1911 1915 'no_merges': ('not merge()', None),
1912 1916 'only_merges': ('merge()', None),
1913 1917 '_ancestors': ('ancestors(%(val)s)', None),
1914 1918 '_fancestors': ('_firstancestors(%(val)s)', None),
1915 1919 '_descendants': ('descendants(%(val)s)', None),
1916 1920 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1917 1921 '_matchfiles': ('_matchfiles(%(val)s)', None),
1918 1922 'date': ('date(%(val)r)', None),
1919 1923 'branch': ('branch(%(val)r)', ' or '),
1920 1924 '_patslog': ('filelog(%(val)r)', ' or '),
1921 1925 '_patsfollow': ('follow(%(val)r)', ' or '),
1922 1926 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1923 1927 'keyword': ('keyword(%(val)r)', ' or '),
1924 1928 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1925 1929 'user': ('user(%(val)r)', ' or '),
1926 1930 }
1927 1931
1928 1932 opts = dict(opts)
1929 1933 # follow or not follow?
1930 1934 follow = opts.get('follow') or opts.get('follow_first')
1931 1935 if opts.get('follow_first'):
1932 1936 followfirst = 1
1933 1937 else:
1934 1938 followfirst = 0
1935 1939 # --follow with FILE behaviour depends on revs...
1936 1940 it = iter(revs)
1937 1941 startrev = it.next()
1938 1942 followdescendants = startrev < next(it, startrev)
1939 1943
1940 1944 # branch and only_branch are really aliases and must be handled at
1941 1945 # the same time
1942 1946 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1943 1947 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1944 1948 # pats/include/exclude are passed to match.match() directly in
1945 1949 # _matchfiles() revset but walkchangerevs() builds its matcher with
1946 1950 # scmutil.match(). The difference is input pats are globbed on
1947 1951 # platforms without shell expansion (windows).
1948 1952 wctx = repo[None]
1949 1953 match, pats = scmutil.matchandpats(wctx, pats, opts)
1950 1954 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1951 1955 if not slowpath:
1952 1956 for f in match.files():
1953 1957 if follow and f not in wctx:
1954 1958 # If the file exists, it may be a directory, so let it
1955 1959 # take the slow path.
1956 1960 if os.path.exists(repo.wjoin(f)):
1957 1961 slowpath = True
1958 1962 continue
1959 1963 else:
1960 1964 raise util.Abort(_('cannot follow file not in parent '
1961 1965 'revision: "%s"') % f)
1962 1966 filelog = repo.file(f)
1963 1967 if not filelog:
1964 1968 # A zero count may be a directory or deleted file, so
1965 1969 # try to find matching entries on the slow path.
1966 1970 if follow:
1967 1971 raise util.Abort(
1968 1972 _('cannot follow nonexistent file: "%s"') % f)
1969 1973 slowpath = True
1970 1974
1971 1975 # We decided to fall back to the slowpath because at least one
1972 1976 # of the paths was not a file. Check to see if at least one of them
1973 1977 # existed in history - in that case, we'll continue down the
1974 1978 # slowpath; otherwise, we can turn off the slowpath
1975 1979 if slowpath:
1976 1980 for path in match.files():
1977 1981 if path == '.' or path in repo.store:
1978 1982 break
1979 1983 else:
1980 1984 slowpath = False
1981 1985
1982 1986 fpats = ('_patsfollow', '_patsfollowfirst')
1983 1987 fnopats = (('_ancestors', '_fancestors'),
1984 1988 ('_descendants', '_fdescendants'))
1985 1989 if slowpath:
1986 1990 # See walkchangerevs() slow path.
1987 1991 #
1988 1992 # pats/include/exclude cannot be represented as separate
1989 1993 # revset expressions as their filtering logic applies at file
1990 1994 # level. For instance "-I a -X b" matches a revision touching
1991 1995 # "a" and "b" while "file(a) and not file(b)" does
1992 1996 # not. Besides, filesets are evaluated against the working
1993 1997 # directory.
1994 1998 matchargs = ['r:', 'd:relpath']
1995 1999 for p in pats:
1996 2000 matchargs.append('p:' + p)
1997 2001 for p in opts.get('include', []):
1998 2002 matchargs.append('i:' + p)
1999 2003 for p in opts.get('exclude', []):
2000 2004 matchargs.append('x:' + p)
2001 2005 matchargs = ','.join(('%r' % p) for p in matchargs)
2002 2006 opts['_matchfiles'] = matchargs
2003 2007 if follow:
2004 2008 opts[fnopats[0][followfirst]] = '.'
2005 2009 else:
2006 2010 if follow:
2007 2011 if pats:
2008 2012 # follow() revset interprets its file argument as a
2009 2013 # manifest entry, so use match.files(), not pats.
2010 2014 opts[fpats[followfirst]] = list(match.files())
2011 2015 else:
2012 2016 op = fnopats[followdescendants][followfirst]
2013 2017 opts[op] = 'rev(%d)' % startrev
2014 2018 else:
2015 2019 opts['_patslog'] = list(pats)
2016 2020
2017 2021 filematcher = None
2018 2022 if opts.get('patch') or opts.get('stat'):
2019 2023 # When following files, track renames via a special matcher.
2020 2024 # If we're forced to take the slowpath it means we're following
2021 2025 # at least one pattern/directory, so don't bother with rename tracking.
2022 2026 if follow and not match.always() and not slowpath:
2023 2027 # _makefollowlogfilematcher expects its files argument to be
2024 2028 # relative to the repo root, so use match.files(), not pats.
2025 2029 filematcher = _makefollowlogfilematcher(repo, match.files(),
2026 2030 followfirst)
2027 2031 else:
2028 2032 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2029 2033 if filematcher is None:
2030 2034 filematcher = lambda rev: match
2031 2035
2032 2036 expr = []
2033 2037 for op, val in sorted(opts.iteritems()):
2034 2038 if not val:
2035 2039 continue
2036 2040 if op not in opt2revset:
2037 2041 continue
2038 2042 revop, andor = opt2revset[op]
2039 2043 if '%(val)' not in revop:
2040 2044 expr.append(revop)
2041 2045 else:
2042 2046 if not isinstance(val, list):
2043 2047 e = revop % {'val': val}
2044 2048 else:
2045 2049 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2046 2050 expr.append(e)
2047 2051
2048 2052 if expr:
2049 2053 expr = '(' + ' and '.join(expr) + ')'
2050 2054 else:
2051 2055 expr = None
2052 2056 return expr, filematcher
2053 2057
2054 2058 def _logrevs(repo, opts):
2055 2059 # Default --rev value depends on --follow but --follow behaviour
2056 2060 # depends on revisions resolved from --rev...
2057 2061 follow = opts.get('follow') or opts.get('follow_first')
2058 2062 if opts.get('rev'):
2059 2063 revs = scmutil.revrange(repo, opts['rev'])
2060 2064 elif follow and repo.dirstate.p1() == nullid:
2061 2065 revs = revset.baseset()
2062 2066 elif follow:
2063 2067 revs = repo.revs('reverse(:.)')
2064 2068 else:
2065 2069 revs = revset.spanset(repo)
2066 2070 revs.reverse()
2067 2071 return revs
2068 2072
2069 2073 def getgraphlogrevs(repo, pats, opts):
2070 2074 """Return (revs, expr, filematcher) where revs is an iterable of
2071 2075 revision numbers, expr is a revset string built from log options
2072 2076 and file patterns or None, and used to filter 'revs'. If --stat or
2073 2077 --patch are not passed filematcher is None. Otherwise it is a
2074 2078 callable taking a revision number and returning a match object
2075 2079 filtering the files to be detailed when displaying the revision.
2076 2080 """
2077 2081 limit = loglimit(opts)
2078 2082 revs = _logrevs(repo, opts)
2079 2083 if not revs:
2080 2084 return revset.baseset(), None, None
2081 2085 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2082 2086 if opts.get('rev'):
2083 2087 # User-specified revs might be unsorted, but don't sort before
2084 2088 # _makelogrevset because it might depend on the order of revs
2085 2089 revs.sort(reverse=True)
2086 2090 if expr:
2087 2091 # Revset matchers often operate faster on revisions in changelog
2088 2092 # order, because most filters deal with the changelog.
2089 2093 revs.reverse()
2090 2094 matcher = revset.match(repo.ui, expr)
2091 2095 # Revset matches can reorder revisions. "A or B" typically returns
2092 2096 # the revision matching A then the revision matching B. Sort
2093 2097 # again to fix that.
2094 2098 revs = matcher(repo, revs)
2095 2099 revs.sort(reverse=True)
2096 2100 if limit is not None:
2097 2101 limitedrevs = []
2098 2102 for idx, rev in enumerate(revs):
2099 2103 if idx >= limit:
2100 2104 break
2101 2105 limitedrevs.append(rev)
2102 2106 revs = revset.baseset(limitedrevs)
2103 2107
2104 2108 return revs, expr, filematcher
2105 2109
2106 2110 def getlogrevs(repo, pats, opts):
2107 2111 """Return (revs, expr, filematcher) where revs is an iterable of
2108 2112 revision numbers, expr is a revset string built from log options
2109 2113 and file patterns or None, and used to filter 'revs'. If --stat or
2110 2114 --patch are not passed filematcher is None. Otherwise it is a
2111 2115 callable taking a revision number and returning a match object
2112 2116 filtering the files to be detailed when displaying the revision.
2113 2117 """
2114 2118 limit = loglimit(opts)
2115 2119 revs = _logrevs(repo, opts)
2116 2120 if not revs:
2117 2121 return revset.baseset([]), None, None
2118 2122 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2119 2123 if expr:
2120 2124 # Revset matchers often operate faster on revisions in changelog
2121 2125 # order, because most filters deal with the changelog.
2122 2126 if not opts.get('rev'):
2123 2127 revs.reverse()
2124 2128 matcher = revset.match(repo.ui, expr)
2125 2129 # Revset matches can reorder revisions. "A or B" typically returns
2126 2130 # the revision matching A then the revision matching B. Sort
2127 2131 # again to fix that.
2128 2132 revs = matcher(repo, revs)
2129 2133 if not opts.get('rev'):
2130 2134 revs.sort(reverse=True)
2131 2135 if limit is not None:
2132 2136 limitedrevs = []
2133 2137 for idx, r in enumerate(revs):
2134 2138 if limit <= idx:
2135 2139 break
2136 2140 limitedrevs.append(r)
2137 2141 revs = revset.baseset(limitedrevs)
2138 2142
2139 2143 return revs, expr, filematcher
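
# Caller sketch (illustrative; the log command wires these pieces together
# along these lines -- note filematcher may be None without --patch/--stat):
#
#   revs, expr, filematcher = getlogrevs(repo, pats, opts)
#   displayer = show_changeset(ui, repo, opts, buffered=True)
#   for rev in revs:
#       matchfn = filematcher(rev) if filematcher else None
#       displayer.show(repo[rev], matchfn=matchfn)
#       displayer.flush(rev)
#   displayer.close()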
2140 2144
2141 2145 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
2142 2146 filematcher=None):
2143 2147 seen, state = [], graphmod.asciistate()
2144 2148 for rev, type, ctx, parents in dag:
2145 2149 char = 'o'
2146 2150 if ctx.node() in showparents:
2147 2151 char = '@'
2148 2152 elif ctx.obsolete():
2149 2153 char = 'x'
2150 2154 elif ctx.closesbranch():
2151 2155 char = '_'
2152 2156 copies = None
2153 2157 if getrenamed and ctx.rev():
2154 2158 copies = []
2155 2159 for fn in ctx.files():
2156 2160 rename = getrenamed(fn, ctx.rev())
2157 2161 if rename:
2158 2162 copies.append((fn, rename[0]))
2159 2163 revmatchfn = None
2160 2164 if filematcher is not None:
2161 2165 revmatchfn = filematcher(ctx.rev())
2162 2166 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2163 2167 lines = displayer.hunk.pop(rev).split('\n')
2164 2168 if not lines[-1]:
2165 2169 del lines[-1]
2166 2170 displayer.flush(rev)
2167 2171 edges = edgefn(type, char, lines, seen, rev, parents)
2168 2172 for type, char, lines, coldata in edges:
2169 2173 graphmod.ascii(ui, state, type, char, lines, coldata)
2170 2174 displayer.close()
2171 2175
2172 2176 def graphlog(ui, repo, *pats, **opts):
2173 2177 # Parameters are identical to log command ones
2174 2178 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2175 2179 revdag = graphmod.dagwalker(repo, revs)
2176 2180
2177 2181 getrenamed = None
2178 2182 if opts.get('copies'):
2179 2183 endrev = None
2180 2184 if opts.get('rev'):
2181 2185 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2182 2186 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2183 2187 displayer = show_changeset(ui, repo, opts, buffered=True)
2184 2188 showparents = [ctx.node() for ctx in repo[None].parents()]
2185 2189 displaygraph(ui, revdag, displayer, showparents,
2186 2190 graphmod.asciiedges, getrenamed, filematcher)
2187 2191
2188 2192 def checkunsupportedgraphflags(pats, opts):
2189 2193 for op in ["newest_first"]:
2190 2194 if op in opts and opts[op]:
2191 2195 raise util.Abort(_("-G/--graph option is incompatible with --%s")
2192 2196 % op.replace("_", "-"))
2193 2197
2194 2198 def graphrevs(repo, nodes, opts):
2195 2199 limit = loglimit(opts)
2196 2200 nodes.reverse()
2197 2201 if limit is not None:
2198 2202 nodes = nodes[:limit]
2199 2203 return graphmod.nodes(repo, nodes)
2200 2204
2201 2205 def add(ui, repo, match, prefix, explicitonly, **opts):
2202 2206 join = lambda f: os.path.join(prefix, f)
2203 2207 bad = []
2204 2208 oldbad = match.bad
2205 2209 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
2206 2210 names = []
2207 2211 wctx = repo[None]
2208 2212 cca = None
2209 2213 abort, warn = scmutil.checkportabilityalert(ui)
2210 2214 if abort or warn:
2211 2215 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2212 2216 for f in wctx.walk(match):
2213 2217 exact = match.exact(f)
2214 2218 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2215 2219 if cca:
2216 2220 cca(f)
2217 2221 names.append(f)
2218 2222 if ui.verbose or not exact:
2219 2223 ui.status(_('adding %s\n') % match.rel(f))
2220 2224
2221 2225 for subpath in sorted(wctx.substate):
2222 2226 sub = wctx.sub(subpath)
2223 2227 try:
2224 2228 submatch = matchmod.narrowmatcher(subpath, match)
2225 2229 if opts.get('subrepos'):
2226 2230 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2227 2231 else:
2228 2232 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2229 2233 except error.LookupError:
2230 2234 ui.status(_("skipping missing subrepository: %s\n")
2231 2235 % join(subpath))
2232 2236
2233 2237 if not opts.get('dry_run'):
2234 2238 rejected = wctx.add(names, prefix)
2235 2239 bad.extend(f for f in rejected if f in match.files())
2236 2240 return bad
2237 2241
2238 2242 def forget(ui, repo, match, prefix, explicitonly):
2239 2243 join = lambda f: os.path.join(prefix, f)
2240 2244 bad = []
2241 2245 oldbad = match.bad
2242 2246 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
2243 2247 wctx = repo[None]
2244 2248 forgot = []
2245 2249 s = repo.status(match=match, clean=True)
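# s is (modified, added, removed, deleted, unknown, ignored, clean); forget
# candidates are the tracked ones: modified, added, deleted and clean files.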
2246 2250 forget = sorted(s[0] + s[1] + s[3] + s[6])
2247 2251 if explicitonly:
2248 2252 forget = [f for f in forget if match.exact(f)]
2249 2253
2250 2254 for subpath in sorted(wctx.substate):
2251 2255 sub = wctx.sub(subpath)
2252 2256 try:
2253 2257 submatch = matchmod.narrowmatcher(subpath, match)
2254 2258 subbad, subforgot = sub.forget(submatch, prefix)
2255 2259 bad.extend([subpath + '/' + f for f in subbad])
2256 2260 forgot.extend([subpath + '/' + f for f in subforgot])
2257 2261 except error.LookupError:
2258 2262 ui.status(_("skipping missing subrepository: %s\n")
2259 2263 % join(subpath))
2260 2264
2261 2265 if not explicitonly:
2262 2266 for f in match.files():
2263 2267 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2264 2268 if f not in forgot:
2265 2269 if repo.wvfs.exists(f):
2266 2270 # Don't complain if the exact case match wasn't given.
2267 2271 # But don't do this until after checking 'forgot', so
2268 2272 # that subrepo files aren't normalized, and this op is
2269 2273 # purely from data cached by the status walk above.
2270 2274 if repo.dirstate.normalize(f) in repo.dirstate:
2271 2275 continue
2272 2276 ui.warn(_('not removing %s: '
2273 2277 'file is already untracked\n')
2274 2278 % match.rel(f))
2275 2279 bad.append(f)
2276 2280
2277 2281 for f in forget:
2278 2282 if ui.verbose or not match.exact(f):
2279 2283 ui.status(_('removing %s\n') % match.rel(f))
2280 2284
2281 2285 rejected = wctx.forget(forget, prefix)
2282 2286 bad.extend(f for f in rejected if f in match.files())
2283 2287 forgot.extend(f for f in forget if f not in rejected)
2284 2288 return bad, forgot
2285 2289
2286 2290 def files(ui, ctx, m, fm, fmt, subrepos):
2287 2291 rev = ctx.rev()
2288 2292 ret = 1
2289 2293 ds = ctx.repo().dirstate
2290 2294
2291 2295 for f in ctx.matches(m):
2292 2296 if rev is None and ds[f] == 'r':
2293 2297 continue
2294 2298 fm.startitem()
2295 2299 if ui.verbose:
2296 2300 fc = ctx[f]
2297 2301 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2298 2302 fm.data(abspath=f)
2299 2303 fm.write('path', fmt, m.rel(f))
2300 2304 ret = 0
2301 2305
2302 2306 for subpath in sorted(ctx.substate):
2303 2307 def matchessubrepo(subpath):
2304 2308 return (m.always() or m.exact(subpath)
2305 2309 or any(f.startswith(subpath + '/') for f in m.files()))
2306 2310
2307 2311 if subrepos or matchessubrepo(subpath):
2308 2312 sub = ctx.sub(subpath)
2309 2313 try:
2310 2314 submatch = matchmod.narrowmatcher(subpath, m)
2311 2315 if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
2312 2316 ret = 0
2313 2317 except error.LookupError:
2314 2318 ui.status(_("skipping missing subrepository: %s\n")
2315 2319 % m.abs(subpath))
2316 2320
2317 2321 return ret
2318 2322
2319 2323 def remove(ui, repo, m, prefix, after, force, subrepos):
2320 2324 join = lambda f: os.path.join(prefix, f)
2321 2325 ret = 0
2322 2326 s = repo.status(match=m, clean=True)
2323 2327 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2324 2328
2325 2329 wctx = repo[None]
2326 2330
2327 2331 for subpath in sorted(wctx.substate):
2328 2332 def matchessubrepo(matcher, subpath):
2329 2333 if matcher.exact(subpath):
2330 2334 return True
2331 2335 for f in matcher.files():
2332 2336 if f.startswith(subpath):
2333 2337 return True
2334 2338 return False
2335 2339
2336 2340 if subrepos or matchessubrepo(m, subpath):
2337 2341 sub = wctx.sub(subpath)
2338 2342 try:
2339 2343 submatch = matchmod.narrowmatcher(subpath, m)
2340 2344 if sub.removefiles(submatch, prefix, after, force, subrepos):
2341 2345 ret = 1
2342 2346 except error.LookupError:
2343 2347 ui.status(_("skipping missing subrepository: %s\n")
2344 2348 % join(subpath))
2345 2349
2346 2350 # warn about failure to delete explicit files/dirs
2347 2351 deleteddirs = util.dirs(deleted)
2348 2352 for f in m.files():
2349 2353 def insubrepo():
2350 2354 for subpath in wctx.substate:
2351 2355 if f.startswith(subpath):
2352 2356 return True
2353 2357 return False
2354 2358
2355 2359 isdir = f in deleteddirs or wctx.hasdir(f)
2356 2360 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2357 2361 continue
2358 2362
2359 2363 if repo.wvfs.exists(f):
2360 2364 if repo.wvfs.isdir(f):
2361 2365 ui.warn(_('not removing %s: no tracked files\n')
2362 2366 % m.rel(f))
2363 2367 else:
2364 2368 ui.warn(_('not removing %s: file is untracked\n')
2365 2369 % m.rel(f))
2366 2370 # missing files will generate a warning elsewhere
2367 2371 ret = 1
2368 2372
2369 2373 if force:
2370 2374 list = modified + deleted + clean + added
2371 2375 elif after:
2372 2376 list = deleted
2373 2377 for f in modified + added + clean:
2374 2378 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2375 2379 ret = 1
2376 2380 else:
2377 2381 list = deleted + clean
2378 2382 for f in modified:
2379 2383 ui.warn(_('not removing %s: file is modified (use -f'
2380 2384 ' to force removal)\n') % m.rel(f))
2381 2385 ret = 1
2382 2386 for f in added:
2383 2387 ui.warn(_('not removing %s: file has been marked for add'
2384 2388 ' (use forget to undo)\n') % m.rel(f))
2385 2389 ret = 1
2386 2390
2387 2391 for f in sorted(list):
2388 2392 if ui.verbose or not m.exact(f):
2389 2393 ui.status(_('removing %s\n') % m.rel(f))
2390 2394
2391 2395 wlock = repo.wlock()
2392 2396 try:
2393 2397 if not after:
2394 2398 for f in list:
2395 2399 if f in added:
2396 2400 continue # we never unlink added files on remove
2397 2401 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2398 2402 repo[None].forget(list)
2399 2403 finally:
2400 2404 wlock.release()
2401 2405
2402 2406 return ret
2403 2407
2404 2408 def cat(ui, repo, ctx, matcher, prefix, **opts):
2405 2409 err = 1
2406 2410
2407 2411 def write(path):
2408 2412 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2409 2413 pathname=os.path.join(prefix, path))
2410 2414 data = ctx[path].data()
2411 2415 if opts.get('decode'):
2412 2416 data = repo.wwritedata(path, data)
2413 2417 fp.write(data)
2414 2418 fp.close()
2415 2419
2416 2420 # Automation often uses hg cat on single files, so special case it
2417 2421 # for performance to avoid the cost of parsing the manifest.
2418 2422 if len(matcher.files()) == 1 and not matcher.anypats():
2419 2423 file = matcher.files()[0]
2420 2424 mf = repo.manifest
2421 2425 mfnode = ctx.manifestnode()
2422 2426 if mfnode and mf.find(mfnode, file)[0]:
2423 2427 write(file)
2424 2428 return 0
2425 2429
2426 2430 # Don't warn about "missing" files that are really in subrepos
2427 2431 bad = matcher.bad
2428 2432
2429 2433 def badfn(path, msg):
2430 2434 for subpath in ctx.substate:
2431 2435 if path.startswith(subpath):
2432 2436 return
2433 2437 bad(path, msg)
2434 2438
2435 2439 matcher.bad = badfn
2436 2440
2437 2441 for abs in ctx.walk(matcher):
2438 2442 write(abs)
2439 2443 err = 0
2440 2444
2441 2445 matcher.bad = bad
2442 2446
2443 2447 for subpath in sorted(ctx.substate):
2444 2448 sub = ctx.sub(subpath)
2445 2449 try:
2446 2450 submatch = matchmod.narrowmatcher(subpath, matcher)
2447 2451
2448 2452 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2449 2453 **opts):
2450 2454 err = 0
2451 2455 except error.RepoLookupError:
2452 2456 ui.status(_("skipping missing subrepository: %s\n")
2453 2457 % os.path.join(prefix, subpath))
2454 2458
2455 2459 return err
2456 2460
2457 2461 def commit(ui, repo, commitfunc, pats, opts):
2458 2462 '''commit the specified files or all outstanding changes'''
2459 2463 date = opts.get('date')
2460 2464 if date:
2461 2465 opts['date'] = util.parsedate(date)
2462 2466 message = logmessage(ui, opts)
2463 2467 matcher = scmutil.match(repo[None], pats, opts)
2464 2468
2465 2469 # extract addremove carefully -- this function can be called from a command
2466 2470 # that doesn't support addremove
2467 2471 if opts.get('addremove'):
2468 2472 if scmutil.addremove(repo, matcher, "", opts) != 0:
2469 2473 raise util.Abort(
2470 2474 _("failed to mark all new/missing files as added/removed"))
2471 2475
2472 2476 return commitfunc(ui, repo, message, matcher, opts)
2473 2477
2474 2478 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2475 2479 # amend will reuse the existing user if not specified, but the obsolete
2476 2480 # marker creation requires that the current user's name is specified.
2477 2481 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2478 2482 ui.username() # raise exception if username not set
2479 2483
2480 2484 ui.note(_('amending changeset %s\n') % old)
2481 2485 base = old.p1()
2482 2486
2483 2487 wlock = dsguard = lock = newid = None
2484 2488 try:
2485 2489 wlock = repo.wlock()
2486 2490 dsguard = dirstateguard(repo, 'amend')
2487 2491 lock = repo.lock()
2488 2492 tr = repo.transaction('amend')
2489 2493 try:
2490 2494 # See if we got a message from -m or -l, if not, open the editor
2491 2495 # with the message of the changeset to amend
2492 2496 message = logmessage(ui, opts)
2493 2497 # ensure logfile does not conflict with later enforcement of the
2494 2498 # message. potential logfile content has been processed by
2495 2499 # `logmessage` anyway.
2496 2500 opts.pop('logfile')
2497 2501 # First, do a regular commit to record all changes in the working
2498 2502 # directory (if there are any)
2499 2503 ui.callhooks = False
2500 2504 activebookmark = repo._activebookmark
2501 2505 try:
2502 2506 repo._activebookmark = None
2503 2507 opts['message'] = 'temporary amend commit for %s' % old
2504 2508 node = commit(ui, repo, commitfunc, pats, opts)
2505 2509 finally:
2506 2510 repo._activebookmark = activebookmark
2507 2511 ui.callhooks = True
2508 2512 ctx = repo[node]
2509 2513
2510 2514 # Participating changesets:
2511 2515 #
2512 2516 # node/ctx o - new (intermediate) commit that contains changes
2513 2517 # | from working dir to go into amending commit
2514 2518 # | (or a workingctx if there were no changes)
2515 2519 # |
2516 2520 # old o - changeset to amend
2517 2521 # |
2518 2522 # base o - parent of amending changeset
2519 2523
2520 2524 # Update extra dict from amended commit (e.g. to preserve graft
2521 2525 # source)
2522 2526 extra.update(old.extra())
2523 2527
2524 2528 # Also update it from the intermediate commit or from the wctx
2525 2529 extra.update(ctx.extra())
2526 2530
2527 2531 if len(old.parents()) > 1:
2528 2532 # ctx.files() isn't reliable for merges, so fall back to the
2529 2533 # slower repo.status() method
2530 2534 files = set([fn for st in repo.status(base, old)[:3]
2531 2535 for fn in st])
2532 2536 else:
2533 2537 files = set(old.files())
2534 2538
2535 2539 # Second, we use either the commit we just did, or if there were no
2536 2540 # changes the parent of the working directory as the version of the
2537 2541 # files in the final amend commit
2538 2542 if node:
2539 2543 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2540 2544
2541 2545 user = ctx.user()
2542 2546 date = ctx.date()
2543 2547 # Recompute copies (avoid recording a -> b -> a)
2544 2548 copied = copies.pathcopies(base, ctx)
2545 2549 if old.p2():
2546 2550 copied.update(copies.pathcopies(old.p2(), ctx))
2547 2551
2548 2552 # Prune files which were reverted by the updates: if old
2549 2553 # introduced file X and our intermediate commit, node,
2550 2554 # renamed that file, then those two files are the same and
2551 2555 # we can discard X from our list of files. Likewise if X
2552 2556 # was deleted, it's no longer relevant
2553 2557 files.update(ctx.files())
2554 2558
2555 2559 def samefile(f):
2556 2560 if f in ctx.manifest():
2557 2561 a = ctx.filectx(f)
2558 2562 if f in base.manifest():
2559 2563 b = base.filectx(f)
2560 2564 return (not a.cmp(b)
2561 2565 and a.flags() == b.flags())
2562 2566 else:
2563 2567 return False
2564 2568 else:
2565 2569 return f not in base.manifest()
2566 2570 files = [f for f in files if not samefile(f)]
2567 2571
2568 2572 def filectxfn(repo, ctx_, path):
2569 2573 try:
2570 2574 fctx = ctx[path]
2571 2575 flags = fctx.flags()
2572 2576 mctx = context.memfilectx(repo,
2573 2577 fctx.path(), fctx.data(),
2574 2578 islink='l' in flags,
2575 2579 isexec='x' in flags,
2576 2580 copied=copied.get(path))
2577 2581 return mctx
2578 2582 except KeyError:
2579 2583 return None
2580 2584 else:
2581 2585 ui.note(_('copying changeset %s to %s\n') % (old, base))
2582 2586
2583 2587 # Use version of files as in the old cset
2584 2588 def filectxfn(repo, ctx_, path):
2585 2589 try:
2586 2590 return old.filectx(path)
2587 2591 except KeyError:
2588 2592 return None
2589 2593
2590 2594 user = opts.get('user') or old.user()
2591 2595 date = opts.get('date') or old.date()
2592 2596 editform = mergeeditform(old, 'commit.amend')
2593 2597 editor = getcommiteditor(editform=editform, **opts)
2594 2598 if not message:
2595 2599 editor = getcommiteditor(edit=True, editform=editform)
2596 2600 message = old.description()
2597 2601
2598 2602 pureextra = extra.copy()
2599 2603 extra['amend_source'] = old.hex()
2600 2604
2601 2605 new = context.memctx(repo,
2602 2606 parents=[base.node(), old.p2().node()],
2603 2607 text=message,
2604 2608 files=files,
2605 2609 filectxfn=filectxfn,
2606 2610 user=user,
2607 2611 date=date,
2608 2612 extra=extra,
2609 2613 editor=editor)
2610 2614
2611 2615 newdesc = changelog.stripdesc(new.description())
2612 2616 if ((not node)
2613 2617 and newdesc == old.description()
2614 2618 and user == old.user()
2615 2619 and date == old.date()
2616 2620 and pureextra == old.extra()):
2617 2621 # nothing changed. continuing here would create a new node
2618 2622 # anyway because of the amend_source noise.
2619 2623 #
2620 2624 # This is not what we expect from amend.
2621 2625 return old.node()
2622 2626
2623 2627 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2624 2628 try:
2625 2629 if opts.get('secret'):
2626 2630 commitphase = 'secret'
2627 2631 else:
2628 2632 commitphase = old.phase()
2629 2633 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2630 2634 newid = repo.commitctx(new)
2631 2635 finally:
2632 2636 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2633 2637 if newid != old.node():
2634 2638 # Reroute the working copy parent to the new changeset
2635 2639 repo.setparents(newid, nullid)
2636 2640
2637 2641 # Move bookmarks from old parent to amend commit
2638 2642 bms = repo.nodebookmarks(old.node())
2639 2643 if bms:
2640 2644 marks = repo._bookmarks
2641 2645 for bm in bms:
2642 2646 marks[bm] = newid
2643 2647 marks.write()
2644 2648 # commit the whole amend process
2645 2649 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2646 2650 if createmarkers and newid != old.node():
2647 2651 # mark the new changeset as successor of the rewritten one
2648 2652 new = repo[newid]
2649 2653 obs = [(old, (new,))]
2650 2654 if node:
2651 2655 obs.append((ctx, ()))
2652 2656
2653 2657 obsolete.createmarkers(repo, obs)
2654 2658 tr.close()
2655 2659 finally:
2656 2660 tr.release()
2657 2661 dsguard.close()
2658 2662 if not createmarkers and newid != old.node():
2659 2663 # Strip the intermediate commit (if there was one) and the amended
2660 2664 # commit
2661 2665 if node:
2662 2666 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2663 2667 ui.note(_('stripping amended changeset %s\n') % old)
2664 2668 repair.strip(ui, repo, old.node(), topic='amend-backup')
2665 2669 finally:
2666 2670 lockmod.release(lock, dsguard, wlock)
2667 2671 return newid
2668 2672
2669 2673 def commiteditor(repo, ctx, subs, editform=''):
2670 2674 if ctx.description():
2671 2675 return ctx.description()
2672 2676 return commitforceeditor(repo, ctx, subs, editform=editform)
2673 2677
2674 2678 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2675 2679 editform=''):
2676 2680 if not extramsg:
2677 2681 extramsg = _("Leave message empty to abort commit.")
2678 2682
2679 2683 forms = [e for e in editform.split('.') if e]
2680 2684 forms.insert(0, 'changeset')
2681 2685 while forms:
2682 2686 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2683 2687 if tmpl:
2684 2688 committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
2685 2689 break
2686 2690 forms.pop()
2687 2691 else:
2688 2692 committext = buildcommittext(repo, ctx, subs, extramsg)
2689 2693
2690 2694 # run editor in the repository root
2691 2695 olddir = os.getcwd()
2692 2696 os.chdir(repo.root)
2693 2697 text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
2694 2698 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2695 2699 os.chdir(olddir)
2696 2700
2697 2701 if finishdesc:
2698 2702 text = finishdesc(text)
2699 2703 if not text.strip():
2700 2704 raise util.Abort(_("empty commit message"))
2701 2705
2702 2706 return text
2703 2707
2704 2708 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2705 2709 ui = repo.ui
2706 2710 tmpl, mapfile = gettemplate(ui, tmpl, None)
2707 2711
2708 2712 try:
2709 2713 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2710 2714 except SyntaxError, inst:
2711 2715 raise util.Abort(inst.args[0])
2712 2716
2713 2717 for k, v in repo.ui.configitems('committemplate'):
2714 2718 if k != 'changeset':
2715 2719 t.t.cache[k] = v
2716 2720
2717 2721 if not extramsg:
2718 2722 extramsg = '' # ensure that extramsg is string
2719 2723
2720 2724 ui.pushbuffer()
2721 2725 t.show(ctx, extramsg=extramsg)
2722 2726 return ui.popbuffer()
2723 2727
2724 2728 def buildcommittext(repo, ctx, subs, extramsg):
2725 2729 edittext = []
2726 2730 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2727 2731 if ctx.description():
2728 2732 edittext.append(ctx.description())
2729 2733 edittext.append("")
2730 2734 edittext.append("") # Empty line between message and comments.
2731 2735 edittext.append(_("HG: Enter commit message."
2732 2736 " Lines beginning with 'HG:' are removed."))
2733 2737 edittext.append("HG: %s" % extramsg)
2734 2738 edittext.append("HG: --")
2735 2739 edittext.append(_("HG: user: %s") % ctx.user())
2736 2740 if ctx.p2():
2737 2741 edittext.append(_("HG: branch merge"))
2738 2742 if ctx.branch():
2739 2743 edittext.append(_("HG: branch '%s'") % ctx.branch())
2740 2744 if bookmarks.isactivewdirparent(repo):
2741 2745 edittext.append(_("HG: bookmark '%s'") % repo._activebookmark)
2742 2746 edittext.extend([_("HG: subrepo %s") % s for s in subs])
2743 2747 edittext.extend([_("HG: added %s") % f for f in added])
2744 2748 edittext.extend([_("HG: changed %s") % f for f in modified])
2745 2749 edittext.extend([_("HG: removed %s") % f for f in removed])
2746 2750 if not added and not modified and not removed:
2747 2751 edittext.append(_("HG: no files changed"))
2748 2752 edittext.append("")
2749 2753
2750 2754 return "\n".join(edittext)
2751 2755
2752 2756 def commitstatus(repo, node, branch, bheads=None, opts={}):
2753 2757 ctx = repo[node]
2754 2758 parents = ctx.parents()
2755 2759
2756 2760 if (not opts.get('amend') and bheads and node not in bheads and not
2757 2761 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2758 2762 repo.ui.status(_('created new head\n'))
2759 2763 # The message is not printed for initial roots. For the other
2760 2764 # changesets, it is printed in the following situations:
2761 2765 #
2762 2766 # Par column: for the 2 parents with ...
2763 2767 # N: null or no parent
2764 2768 # B: parent is on another named branch
2765 2769 # C: parent is a regular non head changeset
2766 2770 # H: parent was a branch head of the current branch
2767 2771 # Msg column: whether we print "created new head" message
2768 2772 # In the following, it is assumed that there already exists some
2769 2773 # initial branch heads of the current branch, otherwise nothing is
2770 2774 # printed anyway.
2771 2775 #
2772 2776 # Par Msg Comment
2773 2777 # N N y additional topo root
2774 2778 #
2775 2779 # B N y additional branch root
2776 2780 # C N y additional topo head
2777 2781 # H N n usual case
2778 2782 #
2779 2783 # B B y weird additional branch root
2780 2784 # C B y branch merge
2781 2785 # H B n merge with named branch
2782 2786 #
2783 2787 # C C y additional head from merge
2784 2788 # C H n merge with a head
2785 2789 #
2786 2790 # H H n head merge: head count decreases
2787 2791
2788 2792 if not opts.get('close_branch'):
2789 2793 for r in parents:
2790 2794 if r.closesbranch() and r.branch() == branch:
2791 2795 repo.ui.status(_('reopening closed branch head %d\n') % r)
2792 2796
2793 2797 if repo.ui.debugflag:
2794 2798 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2795 2799 elif repo.ui.verbose:
2796 2800 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2797 2801
2798 2802 def revert(ui, repo, ctx, parents, *pats, **opts):
2799 2803 parent, p2 = parents
2800 2804 node = ctx.node()
2801 2805
2802 2806 mf = ctx.manifest()
2803 2807 if node == p2:
2804 2808 parent = p2
2805 2809 if node == parent:
2806 2810 pmf = mf
2807 2811 else:
2808 2812 pmf = None
2809 2813
2810 2814 # need all matching names in dirstate and manifest of target rev,
2811 2815 # so have to walk both. do not print errors if files exist in one
2812 2816 # but not other. in both cases, filesets should be evaluated against
2813 2817 # workingctx to get consistent result (issue4497). this means 'set:**'
2814 2818 # cannot be used to select missing files from target rev.
2815 2819
2816 2820 # `names` is a mapping for all elements in working copy and target revision
2817 2821 # The mapping is in the form:
2818 2822 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2819 2823 names = {}
2820 2824
2821 2825 wlock = repo.wlock()
2822 2826 try:
2823 2827 ## filling of the `names` mapping
2824 2828 # walk dirstate to fill `names`
2825 2829
2826 2830 interactive = opts.get('interactive', False)
2827 2831 wctx = repo[None]
2828 2832 m = scmutil.match(wctx, pats, opts)
2829 2833
2830 2834 # we'll need this later
2831 2835 targetsubs = sorted(s for s in wctx.substate if m(s))
2832 2836
2833 2837 if not m.always():
2834 2838 m.bad = lambda x, y: False
2835 2839 for abs in repo.walk(m):
2836 2840 names[abs] = m.rel(abs), m.exact(abs)
2837 2841
2838 2842 # walk target manifest to fill `names`
2839 2843
2840 2844 def badfn(path, msg):
2841 2845 if path in names:
2842 2846 return
2843 2847 if path in ctx.substate:
2844 2848 return
2845 2849 path_ = path + '/'
2846 2850 for f in names:
2847 2851 if f.startswith(path_):
2848 2852 return
2849 2853 ui.warn("%s: %s\n" % (m.rel(path), msg))
2850 2854
2851 2855 m.bad = badfn
2852 2856 for abs in ctx.walk(m):
2853 2857 if abs not in names:
2854 2858 names[abs] = m.rel(abs), m.exact(abs)
2855 2859
2856 2860 # Find status of all files in `names`.
2857 2861 m = scmutil.matchfiles(repo, names)
2858 2862
2859 2863 changes = repo.status(node1=node, match=m,
2860 2864 unknown=True, ignored=True, clean=True)
2861 2865 else:
2862 2866 changes = repo.status(node1=node, match=m)
2863 2867 for kind in changes:
2864 2868 for abs in kind:
2865 2869 names[abs] = m.rel(abs), m.exact(abs)
2866 2870
2867 2871 m = scmutil.matchfiles(repo, names)
2868 2872
2869 2873 modified = set(changes.modified)
2870 2874 added = set(changes.added)
2871 2875 removed = set(changes.removed)
2872 2876 _deleted = set(changes.deleted)
2873 2877 unknown = set(changes.unknown)
2874 2878 unknown.update(changes.ignored)
2875 2879 clean = set(changes.clean)
2876 2880 modadded = set()
2877 2881
2878 2882 # split between files known in target manifest and the others
2879 2883 smf = set(mf)
2880 2884
2881 2885 # determine the exact nature of the deleted files
2882 2886 deladded = _deleted - smf
2883 2887 deleted = _deleted - deladded
2884 2888
2885 2889 # We need to account for the state of the file in the dirstate,
2886 2890 # even when we revert against something other than the parent. This will
2887 2891 # slightly alter the behavior of revert (doing backup or not, delete
2888 2892 # or just forget etc).
2889 2893 if parent == node:
2890 2894 dsmodified = modified
2891 2895 dsadded = added
2892 2896 dsremoved = removed
2893 2897 # store all local modifications, useful later for rename detection
2894 2898 localchanges = dsmodified | dsadded
2895 2899 modified, added, removed = set(), set(), set()
2896 2900 else:
2897 2901 changes = repo.status(node1=parent, match=m)
2898 2902 dsmodified = set(changes.modified)
2899 2903 dsadded = set(changes.added)
2900 2904 dsremoved = set(changes.removed)
2901 2905 # store all local modifications, useful later for rename detection
2902 2906 localchanges = dsmodified | dsadded
2903 2907
2904 2908 # only take removes between wc and target into account
2905 2909 clean |= dsremoved - removed
2906 2910 dsremoved &= removed
2907 2911 # distinguish between dirstate removes and others
2908 2912 removed -= dsremoved
2909 2913
2910 2914 modadded = added & dsmodified
2911 2915 added -= modadded
2912 2916
2913 2917 # tell newly modified files apart.
2914 2918 dsmodified &= modified
2915 2919 dsmodified |= modified & dsadded # dirstate added may need backup
2916 2920 modified -= dsmodified
2917 2921
2918 2922 # We need to wait for some post-processing to update this set
2919 2923 # before making the distinction. The dirstate will be used for
2920 2924 # that purpose.
2921 2925 dsadded = added
2922 2926
2923 2927 # in case of merge, files that are actually added can be reported as
2924 2928 # modified; we need to post-process the result
2925 2929 if p2 != nullid:
2926 2930 if pmf is None:
2927 2931 # only need parent manifest in the merge case,
2928 2932 # so do not read by default
2929 2933 pmf = repo[parent].manifest()
2930 2934 mergeadd = dsmodified - set(pmf)
2931 2935 dsadded |= mergeadd
2932 2936 dsmodified -= mergeadd
2933 2937
2934 2938 # if f is a rename, update `names` to also revert the source
2935 2939 cwd = repo.getcwd()
2936 2940 for f in localchanges:
2937 2941 src = repo.dirstate.copied(f)
2938 2942 # XXX should we check for rename down to target node?
2939 2943 if src and src not in names and repo.dirstate[src] == 'r':
2940 2944 dsremoved.add(src)
2941 2945 names[src] = (repo.pathto(src, cwd), True)
2942 2946
2943 2947 # distinguish between files to forget and the others
2944 2948 added = set()
2945 2949 for abs in dsadded:
2946 2950 if repo.dirstate[abs] != 'a':
2947 2951 added.add(abs)
2948 2952 dsadded -= added
2949 2953
2950 2954 for abs in deladded:
2951 2955 if repo.dirstate[abs] == 'a':
2952 2956 dsadded.add(abs)
2953 2957 deladded -= dsadded
2954 2958
2955 2959 # For files marked as removed, we check if an unknown file is present at
2956 2960 # the same path. If such a file exists it may need to be backed up.
2957 2961 # Making the distinction at this stage helps keep the backup
2958 2962 # logic simpler.
2959 2963 removunk = set()
2960 2964 for abs in removed:
2961 2965 target = repo.wjoin(abs)
2962 2966 if os.path.lexists(target):
2963 2967 removunk.add(abs)
2964 2968 removed -= removunk
2965 2969
2966 2970 dsremovunk = set()
2967 2971 for abs in dsremoved:
2968 2972 target = repo.wjoin(abs)
2969 2973 if os.path.lexists(target):
2970 2974 dsremovunk.add(abs)
2971 2975 dsremoved -= dsremovunk
2972 2976
2973 2977 # action to be actually performed by revert
2974 2978 # (<list of files>, <message>) tuple
2975 2979 actions = {'revert': ([], _('reverting %s\n')),
2976 2980 'add': ([], _('adding %s\n')),
2977 2981 'remove': ([], _('removing %s\n')),
2978 2982 'drop': ([], _('removing %s\n')),
2979 2983 'forget': ([], _('forgetting %s\n')),
2980 2984 'undelete': ([], _('undeleting %s\n')),
2981 2985 'noop': (None, _('no changes needed to %s\n')),
2982 2986 'unknown': (None, _('file not managed: %s\n')),
2983 2987 }
2984 2988
2985 2989 # "constants" that convey the backup strategy.
2986 2990 # All set to `discard` if `no-backup` is set, to avoid checking
2987 2991 # no_backup lower in the code.
2988 2992 # These values are ordered for comparison purposes
2989 2993 backup = 2 # unconditionally do backup
2990 2994 check = 1 # check if the existing file differs from target
2991 2995 discard = 0 # never do backup
2992 2996 if opts.get('no_backup'):
2993 2997 backup = check = discard
2994 2998
2995 2999 backupanddel = actions['remove']
2996 3000 if not opts.get('no_backup'):
2997 3001 backupanddel = actions['drop']
2998 3002
2999 3003 disptable = (
3000 3004 # dispatch table:
3001 3005 # file state
3002 3006 # action
3003 3007 # make backup
3004 3008
3005 3009 ## Sets that will result in changes to files on disk
3006 3010 # Modified compared to target, no local change
3007 3011 (modified, actions['revert'], discard),
3008 3012 # Modified compared to target, but local file is deleted
3009 3013 (deleted, actions['revert'], discard),
3010 3014 # Modified compared to target, local change
3011 3015 (dsmodified, actions['revert'], backup),
3012 3016 # Added since target
3013 3017 (added, actions['remove'], discard),
3014 3018 # Added in working directory
3015 3019 (dsadded, actions['forget'], discard),
3016 3020 # Added since target, have local modification
3017 3021 (modadded, backupanddel, backup),
3018 3022 # Added since target but file is missing in working directory
3019 3023 (deladded, actions['drop'], discard),
3020 3024 # Removed since target, before working copy parent
3021 3025 (removed, actions['add'], discard),
3022 3026 # Same as `removed` but an unknown file exists at the same path
3023 3027 (removunk, actions['add'], check),
3024 3028 # Removed since target, marked as such in working copy parent
3025 3029 (dsremoved, actions['undelete'], discard),
3026 3030 # Same as `dsremoved` but an unknown file exists at the same path
3027 3031 (dsremovunk, actions['undelete'], check),
3028 3032 ## the following sets do not result in any file changes
3029 3033 # File with no modification
3030 3034 (clean, actions['noop'], discard),
3031 3035 # Existing file, not tracked anywhere
3032 3036 (unknown, actions['unknown'], discard),
3033 3037 )
3034 3038
3035 3039 for abs, (rel, exact) in sorted(names.items()):
3036 3040 # target file to be touched on disk (relative to cwd)
3037 3041 target = repo.wjoin(abs)
3038 3042 # search the entry in the dispatch table.
3039 3043 # if the file is in any of these sets, it was touched in the working
3040 3044 # directory parent and we are sure it needs to be reverted.
3041 3045 for table, (xlist, msg), dobackup in disptable:
3042 3046 if abs not in table:
3043 3047 continue
3044 3048 if xlist is not None:
3045 3049 xlist.append(abs)
3046 3050 if dobackup and (backup <= dobackup
3047 3051 or wctx[abs].cmp(ctx[abs])):
3048 3052 bakname = "%s.orig" % rel
3049 3053 ui.note(_('saving current version of %s as %s\n') %
3050 3054 (rel, bakname))
3051 3055 if not opts.get('dry_run'):
3052 3056 if interactive:
3053 3057 util.copyfile(target, bakname)
3054 3058 else:
3055 3059 util.rename(target, bakname)
3056 3060 if ui.verbose or not exact:
3057 3061 if not isinstance(msg, basestring):
3058 3062 msg = msg(abs)
3059 3063 ui.status(msg % rel)
3060 3064 elif exact:
3061 3065 ui.warn(msg % rel)
3062 3066 break
3063 3067
3064 3068 if not opts.get('dry_run'):
3065 3069 needdata = ('revert', 'add', 'undelete')
3066 3070 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3067 3071 _performrevert(repo, parents, ctx, actions, interactive)
3068 3072
3069 3073 if targetsubs:
3070 3074 # Revert the subrepos on the revert list
3071 3075 for sub in targetsubs:
3072 3076 try:
3073 3077 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3074 3078 except KeyError:
3075 3079 raise util.Abort("subrepository '%s' does not exist in %s!"
3076 3080 % (sub, short(ctx.node())))
3077 3081 finally:
3078 3082 wlock.release()
3079 3083
3080 3084 def _revertprefetch(repo, ctx, *files):
3081 3085 """Let extension changing the storage layer prefetch content"""
3082 3086 pass
3083 3087
3084 3088 def _performrevert(repo, parents, ctx, actions, interactive=False):
3085 3089 """function that actually performs all the actions computed for revert
3086 3090
3087 3091 This is an independent function to let extensions plug in and react to
3088 3092 the imminent revert.
3089 3093
3090 3094 Make sure you have the working directory locked when calling this function.
3091 3095 """
3092 3096 parent, p2 = parents
3093 3097 node = ctx.node()
3094 3098 def checkout(f):
3095 3099 fc = ctx[f]
3096 3100 return repo.wwrite(f, fc.data(), fc.flags())
3097 3101
3098 3102 audit_path = pathutil.pathauditor(repo.root)
3099 3103 for f in actions['forget'][0]:
3100 3104 repo.dirstate.drop(f)
3101 3105 for f in actions['remove'][0]:
3102 3106 audit_path(f)
3103 3107 try:
3104 3108 util.unlinkpath(repo.wjoin(f))
3105 3109 except OSError:
3106 3110 pass
3107 3111 repo.dirstate.remove(f)
3108 3112 for f in actions['drop'][0]:
3109 3113 audit_path(f)
3110 3114 repo.dirstate.remove(f)
3111 3115
3112 3116 normal = None
3113 3117 if node == parent:
3114 3118 # We're reverting to our parent. If possible, we'd like status
3115 3119 # to report the file as clean. We have to use normallookup for
3116 3120 # merges to avoid losing information about merged/dirty files.
3117 3121 if p2 != nullid:
3118 3122 normal = repo.dirstate.normallookup
3119 3123 else:
3120 3124 normal = repo.dirstate.normal
3121 3125
3122 3126 if interactive:
3123 3127 # Prompt the user for changes to revert
3124 3128 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3125 3129 m = scmutil.match(ctx, torevert, {})
3126 3130 diff = patch.diff(repo, None, ctx.node(), m)
3127 3131 originalchunks = patch.parsepatch(diff)
3128 3132 try:
3129 3133 chunks = recordfilter(repo.ui, originalchunks)
3130 3134 except patch.PatchError, err:
3131 3135 raise util.Abort(_('error parsing patch: %s') % err)
3132 3136
3133 3137 # Apply changes
3134 3138 fp = cStringIO.StringIO()
3135 3139 for c in chunks:
3136 3140 c.write(fp)
3137 3141 dopatch = fp.tell()
3138 3142 fp.seek(0)
3139 3143 if dopatch:
3140 3144 try:
3141 3145 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3142 3146 except patch.PatchError, err:
3143 3147 raise util.Abort(str(err))
3144 3148 del fp
3145 3149 else:
3146 3150 for f in actions['revert'][0]:
3147 3151 wsize = checkout(f)
3148 3152 if normal:
3149 3153 normal(f)
3150 3154 elif wsize == repo.dirstate._map[f][2]:
3151 3155 # changes may be overlooked without normallookup,
3152 3156 # if the size isn't changed by the revert
3153 3157 repo.dirstate.normallookup(f)
3154 3158
3155 3159 for f in actions['add'][0]:
3156 3160 checkout(f)
3157 3161 repo.dirstate.add(f)
3158 3162
3159 3163 normal = repo.dirstate.normallookup
3160 3164 if node == parent and p2 == nullid:
3161 3165 normal = repo.dirstate.normal
3162 3166 for f in actions['undelete'][0]:
3163 3167 checkout(f)
3164 3168 normal(f)
3165 3169
3166 3170 copied = copies.pathcopies(repo[parent], ctx)
3167 3171
3168 3172 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3169 3173 if f in copied:
3170 3174 repo.dirstate.copy(copied[f], f)
3171 3175
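# Illustrative sketch: callers of _performrevert() above are expected to hold
# the working directory lock, with 'actions' being the dispatch-table result
# built by the revert driver, e.g.:
#
#     wlock = repo.wlock()
#     try:
#         _performrevert(repo, repo.dirstate.parents(), repo['.'], actions)
#     finally:
#         wlock.release()
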
3172 3176 def command(table):
3173 3177 """Returns a function object to be used as a decorator for making commands.
3174 3178
3175 3179 This function receives a command table as its argument. The table should
3176 3180 be a dict.
3177 3181
3178 3182 The returned function can be used as a decorator for adding commands
3179 3183 to that command table. This function accepts multiple arguments to define
3180 3184 a command.
3181 3185
3182 3186 The first argument is the command name.
3183 3187
3184 3188 The options argument is an iterable of tuples defining command arguments.
3185 3189 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3186 3190
3187 3191 The synopsis argument defines a short, one line summary of how to use the
3188 3192 command. This shows up in the help output.
3189 3193
3190 3194 The norepo argument defines whether the command does not require a
3191 3195 local repository. Most commands operate against a repository, thus the
3192 3196 default is False.
3193 3197
3194 3198 The optionalrepo argument defines whether the command optionally requires
3195 3199 a local repository.
3196 3200
3197 3201 The inferrepo argument defines whether to try to find a repository from the
3198 3202 command line arguments. If True, arguments will be examined for potential
3199 3203 repository locations. See ``findrepo()``. If a repository is found, it
3200 3204 will be used.
3201 3205 """
3202 3206 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3203 3207 inferrepo=False):
3204 3208 def decorator(func):
3205 3209 if synopsis:
3206 3210 table[name] = func, list(options), synopsis
3207 3211 else:
3208 3212 table[name] = func, list(options)
3209 3213
3210 3214 if norepo:
3211 3215 # Avoid import cycle.
3212 3216 import commands
3213 3217 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3214 3218
3215 3219 if optionalrepo:
3216 3220 import commands
3217 3221 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3218 3222
3219 3223 if inferrepo:
3220 3224 import commands
3221 3225 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3222 3226
3223 3227 return func
3224 3228 return decorator
3225 3229
3226 3230 return cmd
3227 3231
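# Illustrative sketch: an extension would typically use this factory to build
# its command table, roughly as follows (command name and option are made up):
#
#     from mercurial import cmdutil
#     from mercurial.i18n import _
#
#     cmdtable = {}
#     command = cmdutil.command(cmdtable)
#
#     @command('hello',
#              [('g', 'greeting', 'hello', _('greeting to use'))],
#              _('hg hello [-g TEXT]'))
#     def hello(ui, repo, **opts):
#         ui.write('%s, %s\n' % (opts['greeting'], repo.root))
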
3228 3232 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3229 3233 # commands.outgoing. "missing" is "missing" of the result of
3230 3234 # "findcommonoutgoing()"
3231 3235 outgoinghooks = util.hooks()
3232 3236
3233 3237 # a list of (ui, repo) functions called by commands.summary
3234 3238 summaryhooks = util.hooks()
3235 3239
3236 3240 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3237 3241 #
3238 3242 # functions should return tuple of booleans below, if 'changes' is None:
3239 3243 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3240 3244 #
3241 3245 # otherwise, 'changes' is a tuple of tuples below:
3242 3246 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3243 3247 # - (desturl, destbranch, destpeer, outgoing)
3244 3248 summaryremotehooks = util.hooks()
3245 3249
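# Illustrative sketch: an extension can register one of the hooks above
# (assuming util.hooks exposes add(source, hook)), e.g. an extra summary line:
#
#     from mercurial import cmdutil
#
#     def summaryhook(ui, repo):
#         ui.write('myext: nothing to report\n')
#
#     def uisetup(ui):
#         cmdutil.summaryhooks.add('myext', summaryhook)
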
3246 3250 # A list of state files kept by multistep operations like graft.
3247 3251 # Since graft cannot be aborted, it is considered 'clearable' by update.
3248 3252 # note: bisect is intentionally excluded
3249 3253 # (state file, clearable, allowcommit, error, hint)
3250 3254 unfinishedstates = [
3251 3255 ('graftstate', True, False, _('graft in progress'),
3252 3256 _("use 'hg graft --continue' or 'hg update' to abort")),
3253 3257 ('updatestate', True, False, _('last update was interrupted'),
3254 3258 _("use 'hg update' to get a consistent checkout"))
3255 3259 ]
3256 3260
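# Illustrative sketch: an extension with its own multistep state file can make
# checkunfinished()/clearunfinished() below aware of it by appending an entry
# in the same (state file, clearable, allowcommit, error, hint) format
# ('myextstate' and the hg commands in the hint are made-up names):
#
#     from mercurial import cmdutil
#     from mercurial.i18n import _
#
#     cmdutil.unfinishedstates.append(
#         ('myextstate', False, False, _('myext operation in progress'),
#          _("use 'hg myext --continue' or 'hg myext --abort'")))
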
3257 3261 def checkunfinished(repo, commit=False):
3258 3262 '''Look for an unfinished multistep operation, like graft, and abort
3259 3263 if found. It's probably good to check this right before
3260 3264 bailifchanged().
3261 3265 '''
3262 3266 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3263 3267 if commit and allowcommit:
3264 3268 continue
3265 3269 if repo.vfs.exists(f):
3266 3270 raise util.Abort(msg, hint=hint)
3267 3271
3268 3272 def clearunfinished(repo):
3269 3273 '''Check for unfinished operations (as above), and clear the ones
3270 3274 that are clearable.
3271 3275 '''
3272 3276 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3273 3277 if not clearable and repo.vfs.exists(f):
3274 3278 raise util.Abort(msg, hint=hint)
3275 3279 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3276 3280 if clearable and repo.vfs.exists(f):
3277 3281 util.unlink(repo.join(f))
3278 3282
3279 3283 class dirstateguard(object):
3280 3284 '''Restore dirstate at unexpected failure.
3281 3285
3282 3286 At the construction, this class does:
3283 3287
3284 3288 - write current ``repo.dirstate`` out, and
3285 3289 - save ``.hg/dirstate`` into the backup file
3286 3290
3287 3291 This restores ``.hg/dirstate`` from the backup file, if ``release()``
3288 3292 is invoked before ``close()``.
3289 3293
3290 3294 If ``close()`` is invoked before ``release()``, only the backup file is removed.
3291 3295 '''
3292 3296
3293 3297 def __init__(self, repo, name):
3294 3298 repo.dirstate.write()
3295 3299 self._repo = repo
3296 3300 self._filename = 'dirstate.backup.%s.%d' % (name, id(self))
3297 3301 repo.vfs.write(self._filename, repo.vfs.tryread('dirstate'))
3298 3302 self._active = True
3299 3303 self._closed = False
3300 3304
3301 3305 def __del__(self):
3302 3306 if self._active: # still active
3303 3307 # this may occur, even if this class is used correctly:
3304 3308 # for example, releasing other resources like a transaction
3305 3309 # may raise an exception before ``dirstateguard.release`` in
3306 3310 # ``release(tr, ....)``.
3307 3311 self._abort()
3308 3312
3309 3313 def close(self):
3310 3314 if not self._active: # already inactivated
3311 3315 msg = (_("can't close already inactivated backup: %s")
3312 3316 % self._filename)
3313 3317 raise util.Abort(msg)
3314 3318
3315 3319 self._repo.vfs.unlink(self._filename)
3316 3320 self._active = False
3317 3321 self._closed = True
3318 3322
3319 3323 def _abort(self):
3320 3324 # this "invalidate()" prevents "wlock.release()" from writing
3321 3325 # dirstate changes out after restoring the original state
3322 3326 self._repo.dirstate.invalidate()
3323 3327
3324 3328 self._repo.vfs.rename(self._filename, 'dirstate')
3325 3329 self._active = False
3326 3330
3327 3331 def release(self):
3328 3332 if not self._closed:
3329 3333 if not self._active: # already inactivated
3330 3334 msg = (_("can't release already inactivated backup: %s")
3331 3335 % self._filename)
3332 3336 raise util.Abort(msg)
3333 3337 self._abort()
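
# Illustrative sketch: typical use of dirstateguard wraps a multi-step
# dirstate mutation; close() keeps the result, while release() in the
# finally clause restores the backup if close() was never reached:
#
#     dsguard = dirstateguard(repo, 'myoperation')
#     try:
#         # ... mutate the dirstate in several steps ...
#         dsguard.close()
#     finally:
#         dsguard.release()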