export: introduce a generic way to add patch header on export...
Pierre-Yves David
r26545:e99c3846 default
@@ -1,3385 +1,3398 @@
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO, shutil
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import formatter
18 18 import crecord as crecordmod
19 19 import lock as lockmod
20 20
21 21 def ishunk(x):
22 22 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
23 23 return isinstance(x, hunkclasses)
24 24
25 25 def newandmodified(chunks, originalchunks):
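# descriptive note: gather files whose hunks are flagged as newly added and
# which were modified during the interactive selection, i.e. their hunks no
# longer match the ones in 'originalchunks'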
26 26 newlyaddedandmodifiedfiles = set()
27 27 for chunk in chunks:
28 28 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
29 29 originalchunks:
30 30 newlyaddedandmodifiedfiles.add(chunk.header.filename())
31 31 return newlyaddedandmodifiedfiles
32 32
33 33 def parsealiases(cmd):
34 34 return cmd.lstrip("^").split("|")
35 35
36 36 def setupwrapcolorwrite(ui):
37 37 # wrap ui.write so diff output can be labeled/colorized
38 38 def wrapwrite(orig, *args, **kw):
39 39 label = kw.pop('label', '')
40 40 for chunk, l in patch.difflabel(lambda: args):
41 41 orig(chunk, label=label + l)
42 42
43 43 oldwrite = ui.write
44 44 def wrap(*args, **kwargs):
45 45 return wrapwrite(oldwrite, *args, **kwargs)
46 46 setattr(ui, 'write', wrap)
47 47 return oldwrite
48 48
49 49 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
50 50 if usecurses:
51 51 if testfile:
52 52 recordfn = crecordmod.testdecorator(testfile,
53 53 crecordmod.testchunkselector)
54 54 else:
55 55 recordfn = crecordmod.chunkselector
56 56
57 57 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
58 58
59 59 else:
60 60 return patch.filterpatch(ui, originalhunks, operation)
61 61
62 62 def recordfilter(ui, originalhunks, operation=None):
63 63 """ Prompts the user to filter the originalhunks and return a list of
64 64 selected hunks.
65 65 *operation* is used for ui purposes to indicate to the user
66 66 what kind of filtering they are doing: reverting, committing, shelving, etc.
67 67 *operation* has to be a translated string.
68 68 """
69 69 usecurses = ui.configbool('experimental', 'crecord', False)
70 70 testfile = ui.config('experimental', 'crecordtest', None)
71 71 oldwrite = setupwrapcolorwrite(ui)
72 72 try:
73 73 newchunks = filterchunks(ui, originalhunks, usecurses, testfile,
74 74 operation)
75 75 finally:
76 76 ui.write = oldwrite
77 77 return newchunks
78 78
79 79 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
80 80 filterfn, *pats, **opts):
81 81 import merge as mergemod
82 82
83 83 if not ui.interactive():
84 84 if cmdsuggest:
85 85 msg = _('running non-interactively, use %s instead') % cmdsuggest
86 86 else:
87 87 msg = _('running non-interactively')
88 88 raise util.Abort(msg)
89 89
90 90 # make sure username is set before going interactive
91 91 if not opts.get('user'):
92 92 ui.username() # raise exception, username not provided
93 93
94 94 def recordfunc(ui, repo, message, match, opts):
95 95 """This is the generic record driver.
96 96
97 97 Its job is to interactively filter local changes, and
98 98 accordingly prepare the working directory into a state in which the
99 99 job can be delegated to a non-interactive commit command such as
100 100 'commit' or 'qrefresh'.
101 101
102 102 After the actual job is done by the non-interactive command, the
103 103 working directory is restored to its original state.
104 104
105 105 In the end we'll record interesting changes, and everything else
106 106 will be left in place, so the user can continue working.
107 107 """
108 108
109 109 checkunfinished(repo, commit=True)
110 110 merge = len(repo[None].parents()) > 1
111 111 if merge:
112 112 raise util.Abort(_('cannot partially commit a merge '
113 113 '(use "hg commit" instead)'))
114 114
115 115 status = repo.status(match=match)
116 116 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
117 117 diffopts.nodates = True
118 118 diffopts.git = True
119 119 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
120 120 originalchunks = patch.parsepatch(originaldiff)
121 121
122 122 # 1. filter patch, so we have an intending-to-apply subset of it
123 123 try:
124 124 chunks = filterfn(ui, originalchunks)
125 125 except patch.PatchError as err:
126 126 raise util.Abort(_('error parsing patch: %s') % err)
127 127
128 128 # We need to keep a backup of files that have been newly added and
129 129 # modified during the recording process because there is a previous
130 130 # version without the edit in the workdir
131 131 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
132 132 contenders = set()
133 133 for h in chunks:
134 134 try:
135 135 contenders.update(set(h.files()))
136 136 except AttributeError:
137 137 pass
138 138
139 139 changed = status.modified + status.added + status.removed
140 140 newfiles = [f for f in changed if f in contenders]
141 141 if not newfiles:
142 142 ui.status(_('no changes to record\n'))
143 143 return 0
144 144
145 145 modified = set(status.modified)
146 146
147 147 # 2. backup changed files, so we can restore them in the end
148 148
149 149 if backupall:
150 150 tobackup = changed
151 151 else:
152 152 tobackup = [f for f in newfiles if f in modified or f in \
153 153 newlyaddedandmodifiedfiles]
154 154 backups = {}
155 155 if tobackup:
156 156 backupdir = repo.join('record-backups')
157 157 try:
158 158 os.mkdir(backupdir)
159 159 except OSError as err:
160 160 if err.errno != errno.EEXIST:
161 161 raise
162 162 try:
163 163 # backup continues
164 164 for f in tobackup:
165 165 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
166 166 dir=backupdir)
167 167 os.close(fd)
168 168 ui.debug('backup %r as %r\n' % (f, tmpname))
169 169 util.copyfile(repo.wjoin(f), tmpname)
170 170 shutil.copystat(repo.wjoin(f), tmpname)
171 171 backups[f] = tmpname
172 172
173 173 fp = cStringIO.StringIO()
174 174 for c in chunks:
175 175 fname = c.filename()
176 176 if fname in backups:
177 177 c.write(fp)
178 178 dopatch = fp.tell()
179 179 fp.seek(0)
180 180
181 181 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
182 182 # 3a. apply filtered patch to clean repo (clean)
183 183 if backups:
184 184 # Equivalent to hg.revert
185 185 choices = lambda key: key in backups
186 186 mergemod.update(repo, repo.dirstate.p1(),
187 187 False, True, choices)
188 188
189 189 # 3b. (apply)
190 190 if dopatch:
191 191 try:
192 192 ui.debug('applying patch\n')
193 193 ui.debug(fp.getvalue())
194 194 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
195 195 except patch.PatchError as err:
196 196 raise util.Abort(str(err))
197 197 del fp
198 198
199 199 # 4. We prepared the working directory according to the filtered
200 200 # patch. Now is the time to delegate the job to
201 201 # commit/qrefresh or the like!
202 202
203 203 # Make all of the pathnames absolute.
204 204 newfiles = [repo.wjoin(nf) for nf in newfiles]
205 205 return commitfunc(ui, repo, *newfiles, **opts)
206 206 finally:
207 207 # 5. finally restore backed-up files
208 208 try:
209 209 dirstate = repo.dirstate
210 210 for realname, tmpname in backups.iteritems():
211 211 ui.debug('restoring %r to %r\n' % (tmpname, realname))
212 212
213 213 if dirstate[realname] == 'n':
214 214 # without normallookup, restoring timestamp
215 215 # may cause partially committed files
216 216 # to be treated as unmodified
217 217 dirstate.normallookup(realname)
218 218
219 219 util.copyfile(tmpname, repo.wjoin(realname))
220 220 # Our calls to copystat() here and above are a
221 221 # hack to trick any editors that have f open into thinking
222 222 # we haven't modified them.
223 223 #
224 224 # Also note that this is racy as an editor could
225 225 # notice the file's mtime before we've finished
226 226 # writing it.
227 227 shutil.copystat(tmpname, repo.wjoin(realname))
228 228 os.unlink(tmpname)
229 229 if tobackup:
230 230 os.rmdir(backupdir)
231 231 except OSError:
232 232 pass
233 233
234 234 def recordinwlock(ui, repo, message, match, opts):
235 235 wlock = repo.wlock()
236 236 try:
237 237 return recordfunc(ui, repo, message, match, opts)
238 238 finally:
239 239 wlock.release()
240 240
241 241 return commit(ui, repo, recordinwlock, pats, opts)
242 242
243 243 def findpossible(cmd, table, strict=False):
244 244 """
245 245 Return cmd -> (aliases, command table entry)
246 246 for each matching command.
247 247 Return debug commands (or their aliases) only if no normal command matches.
248 248 """
249 249 choice = {}
250 250 debugchoice = {}
251 251
252 252 if cmd in table:
253 253 # short-circuit exact matches, "log" alias beats "^log|history"
254 254 keys = [cmd]
255 255 else:
256 256 keys = table.keys()
257 257
258 258 allcmds = []
259 259 for e in keys:
260 260 aliases = parsealiases(e)
261 261 allcmds.extend(aliases)
262 262 found = None
263 263 if cmd in aliases:
264 264 found = cmd
265 265 elif not strict:
266 266 for a in aliases:
267 267 if a.startswith(cmd):
268 268 found = a
269 269 break
270 270 if found is not None:
271 271 if aliases[0].startswith("debug") or found.startswith("debug"):
272 272 debugchoice[found] = (aliases, table[e])
273 273 else:
274 274 choice[found] = (aliases, table[e])
275 275
276 276 if not choice and debugchoice:
277 277 choice = debugchoice
278 278
279 279 return choice, allcmds
280 280
281 281 def findcmd(cmd, table, strict=True):
282 282 """Return (aliases, command table entry) for command string."""
283 283 choice, allcmds = findpossible(cmd, table, strict)
284 284
285 285 if cmd in choice:
286 286 return choice[cmd]
287 287
288 288 if len(choice) > 1:
289 289 clist = choice.keys()
290 290 clist.sort()
291 291 raise error.AmbiguousCommand(cmd, clist)
292 292
293 293 if choice:
294 294 return choice.values()[0]
295 295
296 296 raise error.UnknownCommand(cmd, allcmds)
297 297
298 298 def findrepo(p):
299 299 while not os.path.isdir(os.path.join(p, ".hg")):
300 300 oldp, p = p, os.path.dirname(p)
301 301 if p == oldp:
302 302 return None
303 303
304 304 return p
305 305
306 306 def bailifchanged(repo, merge=True):
307 307 if merge and repo.dirstate.p2() != nullid:
308 308 raise util.Abort(_('outstanding uncommitted merge'))
309 309 modified, added, removed, deleted = repo.status()[:4]
310 310 if modified or added or removed or deleted:
311 311 raise util.Abort(_('uncommitted changes'))
312 312 ctx = repo[None]
313 313 for s in sorted(ctx.substate):
314 314 ctx.sub(s).bailifchanged()
315 315
316 316 def logmessage(ui, opts):
317 317 """ get the log message according to the -m and -l options """
318 318 message = opts.get('message')
319 319 logfile = opts.get('logfile')
320 320
321 321 if message and logfile:
322 322 raise util.Abort(_('options --message and --logfile are mutually '
323 323 'exclusive'))
324 324 if not message and logfile:
325 325 try:
326 326 if logfile == '-':
327 327 message = ui.fin.read()
328 328 else:
329 329 message = '\n'.join(util.readfile(logfile).splitlines())
330 330 except IOError as inst:
331 331 raise util.Abort(_("can't read commit message '%s': %s") %
332 332 (logfile, inst.strerror))
333 333 return message
334 334
335 335 def mergeeditform(ctxorbool, baseformname):
336 336 """return appropriate editform name (referencing a committemplate)
337 337
338 338 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
339 339 a merge is being committed.
340 340
341 341 This returns baseformname with '.merge' appended if it is a merge,
342 342 otherwise '.normal' is appended.
343 343 """
344 344 if isinstance(ctxorbool, bool):
345 345 if ctxorbool:
346 346 return baseformname + ".merge"
347 347 elif 1 < len(ctxorbool.parents()):
348 348 return baseformname + ".merge"
349 349
350 350 return baseformname + ".normal"
351 351
352 352 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
353 353 editform='', **opts):
354 354 """get appropriate commit message editor according to '--edit' option
355 355
356 356 'finishdesc' is a function to be called with edited commit message
357 357 (= 'description' of the new changeset) just after editing, but
358 358 before checking emptiness. It should return the actual text to be
359 359 stored into history. This allows changing the description before
360 360 storing.
361 361
362 362 'extramsg' is an extra message to be shown in the editor instead of
363 363 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and
364 364 EOL are automatically added.
365 365
366 366 'editform' is a dot-separated list of names, to distinguish
367 367 the purpose of commit text editing.
368 368
369 369 'getcommiteditor' returns 'commitforceeditor' regardless of
370 370 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
371 371 they are specific to usage in MQ.
372 372 """
373 373 if edit or finishdesc or extramsg:
374 374 return lambda r, c, s: commitforceeditor(r, c, s,
375 375 finishdesc=finishdesc,
376 376 extramsg=extramsg,
377 377 editform=editform)
378 378 elif editform:
379 379 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
380 380 else:
381 381 return commiteditor
382 382
383 383 def loglimit(opts):
384 384 """get the log limit according to option -l/--limit"""
385 385 limit = opts.get('limit')
386 386 if limit:
387 387 try:
388 388 limit = int(limit)
389 389 except ValueError:
390 390 raise util.Abort(_('limit must be a positive integer'))
391 391 if limit <= 0:
392 392 raise util.Abort(_('limit must be positive'))
393 393 else:
394 394 limit = None
395 395 return limit
396 396
397 397 def makefilename(repo, pat, node, desc=None,
398 398 total=None, seqno=None, revwidth=None, pathname=None):
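# expand the filename pattern 'pat'; specifiers handled below:
# %% literal '%', %b basename of the repository root, and, when the matching
# argument is given: %H full hex node, %R changelog revision, %h short hex
# node, %m 'desc' with non-word characters replaced by '_', %r zero-padded
# revision, %N total number of patches, %n zero-padded sequence number,
# %s basename of 'pathname', %d dirname of 'pathname' (or '.'), %p 'pathname'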
399 399 node_expander = {
400 400 'H': lambda: hex(node),
401 401 'R': lambda: str(repo.changelog.rev(node)),
402 402 'h': lambda: short(node),
403 403 'm': lambda: re.sub('[^\w]', '_', str(desc))
404 404 }
405 405 expander = {
406 406 '%': lambda: '%',
407 407 'b': lambda: os.path.basename(repo.root),
408 408 }
409 409
410 410 try:
411 411 if node:
412 412 expander.update(node_expander)
413 413 if node:
414 414 expander['r'] = (lambda:
415 415 str(repo.changelog.rev(node)).zfill(revwidth or 0))
416 416 if total is not None:
417 417 expander['N'] = lambda: str(total)
418 418 if seqno is not None:
419 419 expander['n'] = lambda: str(seqno)
420 420 if total is not None and seqno is not None:
421 421 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
422 422 if pathname is not None:
423 423 expander['s'] = lambda: os.path.basename(pathname)
424 424 expander['d'] = lambda: os.path.dirname(pathname) or '.'
425 425 expander['p'] = lambda: pathname
426 426
427 427 newname = []
428 428 patlen = len(pat)
429 429 i = 0
430 430 while i < patlen:
431 431 c = pat[i]
432 432 if c == '%':
433 433 i += 1
434 434 c = pat[i]
435 435 c = expander[c]()
436 436 newname.append(c)
437 437 i += 1
438 438 return ''.join(newname)
439 439 except KeyError as inst:
440 440 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
441 441 inst.args[0])
442 442
443 443 def makefileobj(repo, pat, node=None, desc=None, total=None,
444 444 seqno=None, revwidth=None, mode='wb', modemap=None,
445 445 pathname=None):
446 446
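# an empty pattern or '-' maps to the ui's stdout (stdin when reading); an
# object that already exposes write()/read() is returned as-is; anything else
# is expanded through makefilename() and opened as a regular file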
447 447 writable = mode not in ('r', 'rb')
448 448
449 449 if not pat or pat == '-':
450 450 if writable:
451 451 fp = repo.ui.fout
452 452 else:
453 453 fp = repo.ui.fin
454 454 if util.safehasattr(fp, 'fileno'):
455 455 return os.fdopen(os.dup(fp.fileno()), mode)
456 456 else:
457 457 # if this fp can't be duped properly, return
458 458 # a dummy object that can be closed
459 459 class wrappedfileobj(object):
460 460 noop = lambda x: None
461 461 def __init__(self, f):
462 462 self.f = f
463 463 def __getattr__(self, attr):
464 464 if attr == 'close':
465 465 return self.noop
466 466 else:
467 467 return getattr(self.f, attr)
468 468
469 469 return wrappedfileobj(fp)
470 470 if util.safehasattr(pat, 'write') and writable:
471 471 return pat
472 472 if util.safehasattr(pat, 'read') and 'r' in mode:
473 473 return pat
474 474 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
475 475 if modemap is not None:
476 476 mode = modemap.get(fn, mode)
477 477 if mode == 'wb':
478 478 modemap[fn] = 'ab'
479 479 return open(fn, mode)
480 480
481 481 def openrevlog(repo, cmd, file_, opts):
482 482 """opens the changelog, manifest, a filelog or a given revlog"""
483 483 cl = opts['changelog']
484 484 mf = opts['manifest']
485 485 dir = opts['dir']
486 486 msg = None
487 487 if cl and mf:
488 488 msg = _('cannot specify --changelog and --manifest at the same time')
489 489 elif cl and dir:
490 490 msg = _('cannot specify --changelog and --dir at the same time')
491 491 elif cl or mf:
492 492 if file_:
493 493 msg = _('cannot specify filename with --changelog or --manifest')
494 494 elif not repo:
495 495 msg = _('cannot specify --changelog or --manifest or --dir '
496 496 'without a repository')
497 497 if msg:
498 498 raise util.Abort(msg)
499 499
500 500 r = None
501 501 if repo:
502 502 if cl:
503 503 r = repo.unfiltered().changelog
504 504 elif dir:
505 505 if 'treemanifest' not in repo.requirements:
506 506 raise util.Abort(_("--dir can only be used on repos with "
507 507 "treemanifest enabled"))
508 508 dirlog = repo.dirlog(file_)
509 509 if len(dirlog):
510 510 r = dirlog
511 511 elif mf:
512 512 r = repo.manifest
513 513 elif file_:
514 514 filelog = repo.file(file_)
515 515 if len(filelog):
516 516 r = filelog
517 517 if not r:
518 518 if not file_:
519 519 raise error.CommandError(cmd, _('invalid arguments'))
520 520 if not os.path.isfile(file_):
521 521 raise util.Abort(_("revlog '%s' not found") % file_)
522 522 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
523 523 file_[:-2] + ".i")
524 524 return r
525 525
526 526 def copy(ui, repo, pats, opts, rename=False):
527 527 # called with the repo lock held
528 528 #
529 529 # hgsep => pathname that uses "/" to separate directories
530 530 # ossep => pathname that uses os.sep to separate directories
531 531 cwd = repo.getcwd()
532 532 targets = {}
533 533 after = opts.get("after")
534 534 dryrun = opts.get("dry_run")
535 535 wctx = repo[None]
536 536
537 537 def walkpat(pat):
538 538 srcs = []
539 539 if after:
540 540 badstates = '?'
541 541 else:
542 542 badstates = '?r'
543 543 m = scmutil.match(repo[None], [pat], opts, globbed=True)
544 544 for abs in repo.walk(m):
545 545 state = repo.dirstate[abs]
546 546 rel = m.rel(abs)
547 547 exact = m.exact(abs)
548 548 if state in badstates:
549 549 if exact and state == '?':
550 550 ui.warn(_('%s: not copying - file is not managed\n') % rel)
551 551 if exact and state == 'r':
552 552 ui.warn(_('%s: not copying - file has been marked for'
553 553 ' remove\n') % rel)
554 554 continue
555 555 # abs: hgsep
556 556 # rel: ossep
557 557 srcs.append((abs, rel, exact))
558 558 return srcs
559 559
560 560 # abssrc: hgsep
561 561 # relsrc: ossep
562 562 # otarget: ossep
563 563 def copyfile(abssrc, relsrc, otarget, exact):
564 564 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
565 565 if '/' in abstarget:
566 566 # We cannot normalize abstarget itself, this would prevent
567 567 # case only renames, like a => A.
568 568 abspath, absname = abstarget.rsplit('/', 1)
569 569 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
570 570 reltarget = repo.pathto(abstarget, cwd)
571 571 target = repo.wjoin(abstarget)
572 572 src = repo.wjoin(abssrc)
573 573 state = repo.dirstate[abstarget]
574 574
575 575 scmutil.checkportable(ui, abstarget)
576 576
577 577 # check for collisions
578 578 prevsrc = targets.get(abstarget)
579 579 if prevsrc is not None:
580 580 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
581 581 (reltarget, repo.pathto(abssrc, cwd),
582 582 repo.pathto(prevsrc, cwd)))
583 583 return
584 584
585 585 # check for overwrites
586 586 exists = os.path.lexists(target)
587 587 samefile = False
588 588 if exists and abssrc != abstarget:
589 589 if (repo.dirstate.normalize(abssrc) ==
590 590 repo.dirstate.normalize(abstarget)):
591 591 if not rename:
592 592 ui.warn(_("%s: can't copy - same file\n") % reltarget)
593 593 return
594 594 exists = False
595 595 samefile = True
596 596
597 597 if not after and exists or after and state in 'mn':
598 598 if not opts['force']:
599 599 ui.warn(_('%s: not overwriting - file exists\n') %
600 600 reltarget)
601 601 return
602 602
603 603 if after:
604 604 if not exists:
605 605 if rename:
606 606 ui.warn(_('%s: not recording move - %s does not exist\n') %
607 607 (relsrc, reltarget))
608 608 else:
609 609 ui.warn(_('%s: not recording copy - %s does not exist\n') %
610 610 (relsrc, reltarget))
611 611 return
612 612 elif not dryrun:
613 613 try:
614 614 if exists:
615 615 os.unlink(target)
616 616 targetdir = os.path.dirname(target) or '.'
617 617 if not os.path.isdir(targetdir):
618 618 os.makedirs(targetdir)
619 619 if samefile:
620 620 tmp = target + "~hgrename"
621 621 os.rename(src, tmp)
622 622 os.rename(tmp, target)
623 623 else:
624 624 util.copyfile(src, target)
625 625 srcexists = True
626 626 except IOError as inst:
627 627 if inst.errno == errno.ENOENT:
628 628 ui.warn(_('%s: deleted in working directory\n') % relsrc)
629 629 srcexists = False
630 630 else:
631 631 ui.warn(_('%s: cannot copy - %s\n') %
632 632 (relsrc, inst.strerror))
633 633 return True # report a failure
634 634
635 635 if ui.verbose or not exact:
636 636 if rename:
637 637 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
638 638 else:
639 639 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
640 640
641 641 targets[abstarget] = abssrc
642 642
643 643 # fix up dirstate
644 644 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
645 645 dryrun=dryrun, cwd=cwd)
646 646 if rename and not dryrun:
647 647 if not after and srcexists and not samefile:
648 648 util.unlinkpath(repo.wjoin(abssrc))
649 649 wctx.forget([abssrc])
650 650
651 651 # pat: ossep
652 652 # dest ossep
653 653 # srcs: list of (hgsep, hgsep, ossep, bool)
654 654 # return: function that takes hgsep and returns ossep
655 655 def targetpathfn(pat, dest, srcs):
656 656 if os.path.isdir(pat):
657 657 abspfx = pathutil.canonpath(repo.root, cwd, pat)
658 658 abspfx = util.localpath(abspfx)
659 659 if destdirexists:
660 660 striplen = len(os.path.split(abspfx)[0])
661 661 else:
662 662 striplen = len(abspfx)
663 663 if striplen:
664 664 striplen += len(os.sep)
665 665 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
666 666 elif destdirexists:
667 667 res = lambda p: os.path.join(dest,
668 668 os.path.basename(util.localpath(p)))
669 669 else:
670 670 res = lambda p: dest
671 671 return res
672 672
673 673 # pat: ossep
674 674 # dest ossep
675 675 # srcs: list of (hgsep, hgsep, ossep, bool)
676 676 # return: function that takes hgsep and returns ossep
677 677 def targetpathafterfn(pat, dest, srcs):
678 678 if matchmod.patkind(pat):
679 679 # a mercurial pattern
680 680 res = lambda p: os.path.join(dest,
681 681 os.path.basename(util.localpath(p)))
682 682 else:
683 683 abspfx = pathutil.canonpath(repo.root, cwd, pat)
684 684 if len(abspfx) < len(srcs[0][0]):
685 685 # A directory. Either the target path contains the last
686 686 # component of the source path or it does not.
687 687 def evalpath(striplen):
688 688 score = 0
689 689 for s in srcs:
690 690 t = os.path.join(dest, util.localpath(s[0])[striplen:])
691 691 if os.path.lexists(t):
692 692 score += 1
693 693 return score
694 694
695 695 abspfx = util.localpath(abspfx)
696 696 striplen = len(abspfx)
697 697 if striplen:
698 698 striplen += len(os.sep)
699 699 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
700 700 score = evalpath(striplen)
701 701 striplen1 = len(os.path.split(abspfx)[0])
702 702 if striplen1:
703 703 striplen1 += len(os.sep)
704 704 if evalpath(striplen1) > score:
705 705 striplen = striplen1
706 706 res = lambda p: os.path.join(dest,
707 707 util.localpath(p)[striplen:])
708 708 else:
709 709 # a file
710 710 if destdirexists:
711 711 res = lambda p: os.path.join(dest,
712 712 os.path.basename(util.localpath(p)))
713 713 else:
714 714 res = lambda p: dest
715 715 return res
716 716
717 717 pats = scmutil.expandpats(pats)
718 718 if not pats:
719 719 raise util.Abort(_('no source or destination specified'))
720 720 if len(pats) == 1:
721 721 raise util.Abort(_('no destination specified'))
722 722 dest = pats.pop()
723 723 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
724 724 if not destdirexists:
725 725 if len(pats) > 1 or matchmod.patkind(pats[0]):
726 726 raise util.Abort(_('with multiple sources, destination must be an '
727 727 'existing directory'))
728 728 if util.endswithsep(dest):
729 729 raise util.Abort(_('destination %s is not a directory') % dest)
730 730
731 731 tfn = targetpathfn
732 732 if after:
733 733 tfn = targetpathafterfn
734 734 copylist = []
735 735 for pat in pats:
736 736 srcs = walkpat(pat)
737 737 if not srcs:
738 738 continue
739 739 copylist.append((tfn(pat, dest, srcs), srcs))
740 740 if not copylist:
741 741 raise util.Abort(_('no files to copy'))
742 742
743 743 errors = 0
744 744 for targetpath, srcs in copylist:
745 745 for abssrc, relsrc, exact in srcs:
746 746 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
747 747 errors += 1
748 748
749 749 if errors:
750 750 ui.warn(_('(consider using --after)\n'))
751 751
752 752 return errors != 0
753 753
754 754 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
755 755 runargs=None, appendpid=False):
756 756 '''Run a command as a service.'''
757 757
758 758 def writepid(pid):
759 759 if opts['pid_file']:
760 760 if appendpid:
761 761 mode = 'a'
762 762 else:
763 763 mode = 'w'
764 764 fp = open(opts['pid_file'], mode)
765 765 fp.write(str(pid) + '\n')
766 766 fp.close()
767 767
768 768 if opts['daemon'] and not opts['daemon_pipefds']:
769 769 # Signal child process startup with file removal
770 770 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
771 771 os.close(lockfd)
772 772 try:
773 773 if not runargs:
774 774 runargs = util.hgcmd() + sys.argv[1:]
775 775 runargs.append('--daemon-pipefds=%s' % lockpath)
776 776 # Don't pass --cwd to the child process, because we've already
777 777 # changed directory.
778 778 for i in xrange(1, len(runargs)):
779 779 if runargs[i].startswith('--cwd='):
780 780 del runargs[i]
781 781 break
782 782 elif runargs[i].startswith('--cwd'):
783 783 del runargs[i:i + 2]
784 784 break
785 785 def condfn():
786 786 return not os.path.exists(lockpath)
787 787 pid = util.rundetached(runargs, condfn)
788 788 if pid < 0:
789 789 raise util.Abort(_('child process failed to start'))
790 790 writepid(pid)
791 791 finally:
792 792 try:
793 793 os.unlink(lockpath)
794 794 except OSError as e:
795 795 if e.errno != errno.ENOENT:
796 796 raise
797 797 if parentfn:
798 798 return parentfn(pid)
799 799 else:
800 800 return
801 801
802 802 if initfn:
803 803 initfn()
804 804
805 805 if not opts['daemon']:
806 806 writepid(os.getpid())
807 807
808 808 if opts['daemon_pipefds']:
809 809 lockpath = opts['daemon_pipefds']
810 810 try:
811 811 os.setsid()
812 812 except AttributeError:
813 813 pass
814 814 os.unlink(lockpath)
815 815 util.hidewindow()
816 816 sys.stdout.flush()
817 817 sys.stderr.flush()
818 818
819 819 nullfd = os.open(os.devnull, os.O_RDWR)
820 820 logfilefd = nullfd
821 821 if logfile:
822 822 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
823 823 os.dup2(nullfd, 0)
824 824 os.dup2(logfilefd, 1)
825 825 os.dup2(logfilefd, 2)
826 826 if nullfd not in (0, 1, 2):
827 827 os.close(nullfd)
828 828 if logfile and logfilefd not in (0, 1, 2):
829 829 os.close(logfilefd)
830 830
831 831 if runfn:
832 832 return runfn()
833 833
834 834 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
835 835 """Utility function used by commands.import to import a single patch
836 836
837 837 This function is explicitly defined here to help the evolve extension to
838 838 wrap this part of the import logic.
839 839
840 840 The API is currently a bit ugly because it is a simple code translation from
841 841 the import command. Feel free to make it better.
842 842
843 843 :hunk: a patch (as a binary string)
844 844 :parents: nodes that will be parent of the created commit
845 845 :opts: the full dict of options passed to the import command
846 846 :msgs: list to save commit message to.
847 847 (used in case we need to save it when failing)
848 848 :updatefunc: a function that updates a repo to a given node
849 849 updatefunc(<repo>, <node>)
850 850 """
851 851 # avoid cycle context -> subrepo -> cmdutil
852 852 import context
853 853 tmpname, message, user, date, branch, nodeid, p1, p2 = \
854 854 patch.extract(ui, hunk)
855 855
856 856 update = not opts.get('bypass')
857 857 strip = opts["strip"]
858 858 prefix = opts["prefix"]
859 859 sim = float(opts.get('similarity') or 0)
860 860 if not tmpname:
861 861 return (None, None, False)
862 862 msg = _('applied to working directory')
863 863
864 864 rejects = False
865 865 dsguard = None
866 866
867 867 try:
868 868 cmdline_message = logmessage(ui, opts)
869 869 if cmdline_message:
870 870 # pickup the cmdline msg
871 871 message = cmdline_message
872 872 elif message:
873 873 # pickup the patch msg
874 874 message = message.strip()
875 875 else:
876 876 # launch the editor
877 877 message = None
878 878 ui.debug('message:\n%s\n' % message)
879 879
880 880 if len(parents) == 1:
881 881 parents.append(repo[nullid])
882 882 if opts.get('exact'):
883 883 if not nodeid or not p1:
884 884 raise util.Abort(_('not a Mercurial patch'))
885 885 p1 = repo[p1]
886 886 p2 = repo[p2 or nullid]
887 887 elif p2:
888 888 try:
889 889 p1 = repo[p1]
890 890 p2 = repo[p2]
891 891 # Without any options, consider p2 only if the
892 892 # patch is being applied on top of the recorded
893 893 # first parent.
894 894 if p1 != parents[0]:
895 895 p1 = parents[0]
896 896 p2 = repo[nullid]
897 897 except error.RepoError:
898 898 p1, p2 = parents
899 899 if p2.node() == nullid:
900 900 ui.warn(_("warning: import the patch as a normal revision\n"
901 901 "(use --exact to import the patch as a merge)\n"))
902 902 else:
903 903 p1, p2 = parents
904 904
905 905 n = None
906 906 if update:
907 907 dsguard = dirstateguard(repo, 'tryimportone')
908 908 if p1 != parents[0]:
909 909 updatefunc(repo, p1.node())
910 910 if p2 != parents[1]:
911 911 repo.setparents(p1.node(), p2.node())
912 912
913 913 if opts.get('exact') or opts.get('import_branch'):
914 914 repo.dirstate.setbranch(branch or 'default')
915 915
916 916 partial = opts.get('partial', False)
917 917 files = set()
918 918 try:
919 919 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
920 920 files=files, eolmode=None, similarity=sim / 100.0)
921 921 except patch.PatchError as e:
922 922 if not partial:
923 923 raise util.Abort(str(e))
924 924 if partial:
925 925 rejects = True
926 926
927 927 files = list(files)
928 928 if opts.get('no_commit'):
929 929 if message:
930 930 msgs.append(message)
931 931 else:
932 932 if opts.get('exact') or p2:
933 933 # If you got here, you either use --force and know what
934 934 # you are doing or used --exact or a merge patch while
935 935 # being updated to its first parent.
936 936 m = None
937 937 else:
938 938 m = scmutil.matchfiles(repo, files or [])
939 939 editform = mergeeditform(repo[None], 'import.normal')
940 940 if opts.get('exact'):
941 941 editor = None
942 942 else:
943 943 editor = getcommiteditor(editform=editform, **opts)
944 944 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
945 945 try:
946 946 if partial:
947 947 repo.ui.setconfig('ui', 'allowemptycommit', True)
948 948 n = repo.commit(message, opts.get('user') or user,
949 949 opts.get('date') or date, match=m,
950 950 editor=editor)
951 951 finally:
952 952 repo.ui.restoreconfig(allowemptyback)
953 953 dsguard.close()
954 954 else:
955 955 if opts.get('exact') or opts.get('import_branch'):
956 956 branch = branch or 'default'
957 957 else:
958 958 branch = p1.branch()
959 959 store = patch.filestore()
960 960 try:
961 961 files = set()
962 962 try:
963 963 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
964 964 files, eolmode=None)
965 965 except patch.PatchError as e:
966 966 raise util.Abort(str(e))
967 967 if opts.get('exact'):
968 968 editor = None
969 969 else:
970 970 editor = getcommiteditor(editform='import.bypass')
971 971 memctx = context.makememctx(repo, (p1.node(), p2.node()),
972 972 message,
973 973 opts.get('user') or user,
974 974 opts.get('date') or date,
975 975 branch, files, store,
976 976 editor=editor)
977 977 n = memctx.commit()
978 978 finally:
979 979 store.close()
980 980 if opts.get('exact') and opts.get('no_commit'):
981 981 # --exact with --no-commit is still useful in that it does merge
982 982 # and branch bits
983 983 ui.warn(_("warning: can't check exact import with --no-commit\n"))
984 984 elif opts.get('exact') and hex(n) != nodeid:
985 985 raise util.Abort(_('patch is damaged or loses information'))
986 986 if n:
987 987 # i18n: refers to a short changeset id
988 988 msg = _('created %s') % short(n)
989 989 return (msg, n, rejects)
990 990 finally:
991 991 lockmod.release(dsguard)
992 992 os.unlink(tmpname)
993 993
994 # facility to let extensions include additional data in an exported patch
995 # list of identifiers to be executed in order
996 extraexport = []
997 # mapping from identifier to actual export function
998 # function has to return a string to be added to the header or None
999 # it is given two arguments (sequencenumber, changectx)
1000 extraexportmap = {}
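#
# a minimal sketch of how an extension might use this hook (the 'phase'
# header below is purely illustrative and not part of this change):
#
#     def _exportphase(seqno, ctx):
#         # emit an extra '# Phase <name>' header for non-public changesets
#         if ctx.phasestr() != 'public':
#             return 'Phase %s' % ctx.phasestr()
#         return None
#
#     extraexport.append('phase')
#     extraexportmap['phase'] = _exportphase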
1001
994 1002 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
995 1003 opts=None, match=None):
996 1004 '''export changesets as hg patches.'''
997 1005
998 1006 total = len(revs)
999 1007 revwidth = max([len(str(rev)) for rev in revs])
1000 1008 filemode = {}
1001 1009
1002 1010 def single(rev, seqno, fp):
1003 1011 ctx = repo[rev]
1004 1012 node = ctx.node()
1005 1013 parents = [p.node() for p in ctx.parents() if p]
1006 1014 branch = ctx.branch()
1007 1015 if switch_parent:
1008 1016 parents.reverse()
1009 1017
1010 1018 if parents:
1011 1019 prev = parents[0]
1012 1020 else:
1013 1021 prev = nullid
1014 1022
1015 1023 shouldclose = False
1016 1024 if not fp and len(template) > 0:
1017 1025 desc_lines = ctx.description().rstrip().split('\n')
1018 1026 desc = desc_lines[0] #Commit always has a first line.
1019 1027 fp = makefileobj(repo, template, node, desc=desc, total=total,
1020 1028 seqno=seqno, revwidth=revwidth, mode='wb',
1021 1029 modemap=filemode)
1022 1030 if fp != template:
1023 1031 shouldclose = True
1024 1032 if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
1025 1033 repo.ui.note("%s\n" % fp.name)
1026 1034
1027 1035 if not fp:
1028 1036 write = repo.ui.write
1029 1037 else:
1030 1038 def write(s, **kw):
1031 1039 fp.write(s)
1032 1040
1033 1041 write("# HG changeset patch\n")
1034 1042 write("# User %s\n" % ctx.user())
1035 1043 write("# Date %d %d\n" % ctx.date())
1036 1044 write("# %s\n" % util.datestr(ctx.date()))
1037 1045 if branch and branch != 'default':
1038 1046 write("# Branch %s\n" % branch)
1039 1047 write("# Node ID %s\n" % hex(node))
1040 1048 write("# Parent %s\n" % hex(prev))
1041 1049 if len(parents) > 1:
1042 1050 write("# Parent %s\n" % hex(parents[1]))
1051
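# any extra headers registered by extensions (via 'extraexport' /
# 'extraexportmap') are emitted here, one '# <header>' line each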
1052 for headerid in extraexport:
1053 header = extraexportmap[headerid](seqno, ctx)
1054 if header is not None:
1055 write('# %s\n' % header)
1043 1056 write(ctx.description().rstrip())
1044 1057 write("\n\n")
1045 1058
1046 1059 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1047 1060 write(chunk, label=label)
1048 1061
1049 1062 if shouldclose:
1050 1063 fp.close()
1051 1064
1052 1065 for seqno, rev in enumerate(revs):
1053 1066 single(rev, seqno + 1, fp)
1054 1067
1055 1068 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1056 1069 changes=None, stat=False, fp=None, prefix='',
1057 1070 root='', listsubrepos=False):
1058 1071 '''show diff or diffstat.'''
1059 1072 if fp is None:
1060 1073 write = ui.write
1061 1074 else:
1062 1075 def write(s, **kw):
1063 1076 fp.write(s)
1064 1077
1065 1078 if root:
1066 1079 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1067 1080 else:
1068 1081 relroot = ''
1069 1082 if relroot != '':
1070 1083 # XXX relative roots currently don't work if the root is within a
1071 1084 # subrepo
1072 1085 uirelroot = match.uipath(relroot)
1073 1086 relroot += '/'
1074 1087 for matchroot in match.files():
1075 1088 if not matchroot.startswith(relroot):
1076 1089 ui.warn(_('warning: %s not inside relative root %s\n') % (
1077 1090 match.uipath(matchroot), uirelroot))
1078 1091
1079 1092 if stat:
1080 1093 diffopts = diffopts.copy(context=0)
1081 1094 width = 80
1082 1095 if not ui.plain():
1083 1096 width = ui.termwidth()
1084 1097 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1085 1098 prefix=prefix, relroot=relroot)
1086 1099 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1087 1100 width=width,
1088 1101 git=diffopts.git):
1089 1102 write(chunk, label=label)
1090 1103 else:
1091 1104 for chunk, label in patch.diffui(repo, node1, node2, match,
1092 1105 changes, diffopts, prefix=prefix,
1093 1106 relroot=relroot):
1094 1107 write(chunk, label=label)
1095 1108
1096 1109 if listsubrepos:
1097 1110 ctx1 = repo[node1]
1098 1111 ctx2 = repo[node2]
1099 1112 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1100 1113 tempnode2 = node2
1101 1114 try:
1102 1115 if node2 is not None:
1103 1116 tempnode2 = ctx2.substate[subpath][1]
1104 1117 except KeyError:
1105 1118 # A subrepo that existed in node1 was deleted between node1 and
1106 1119 # node2 (inclusive). Thus, ctx2's substate won't contain that
1107 1120 # subpath. The best we can do is to ignore it.
1108 1121 tempnode2 = None
1109 1122 submatch = matchmod.narrowmatcher(subpath, match)
1110 1123 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1111 1124 stat=stat, fp=fp, prefix=prefix)
1112 1125
1113 1126 class changeset_printer(object):
1114 1127 '''show changeset information when templating not requested.'''
1115 1128
1116 1129 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1117 1130 self.ui = ui
1118 1131 self.repo = repo
1119 1132 self.buffered = buffered
1120 1133 self.matchfn = matchfn
1121 1134 self.diffopts = diffopts
1122 1135 self.header = {}
1123 1136 self.hunk = {}
1124 1137 self.lastheader = None
1125 1138 self.footer = None
1126 1139
1127 1140 def flush(self, ctx):
1128 1141 rev = ctx.rev()
1129 1142 if rev in self.header:
1130 1143 h = self.header[rev]
1131 1144 if h != self.lastheader:
1132 1145 self.lastheader = h
1133 1146 self.ui.write(h)
1134 1147 del self.header[rev]
1135 1148 if rev in self.hunk:
1136 1149 self.ui.write(self.hunk[rev])
1137 1150 del self.hunk[rev]
1138 1151 return 1
1139 1152 return 0
1140 1153
1141 1154 def close(self):
1142 1155 if self.footer:
1143 1156 self.ui.write(self.footer)
1144 1157
1145 1158 def show(self, ctx, copies=None, matchfn=None, **props):
1146 1159 if self.buffered:
1147 1160 self.ui.pushbuffer()
1148 1161 self._show(ctx, copies, matchfn, props)
1149 1162 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
1150 1163 else:
1151 1164 self._show(ctx, copies, matchfn, props)
1152 1165
1153 1166 def _show(self, ctx, copies, matchfn, props):
1154 1167 '''show a single changeset or file revision'''
1155 1168 changenode = ctx.node()
1156 1169 rev = ctx.rev()
1157 1170 if self.ui.debugflag:
1158 1171 hexfunc = hex
1159 1172 else:
1160 1173 hexfunc = short
1161 1174 # as of now, wctx.node() and wctx.rev() return None, but we want to
1162 1175 # show the same values as {node} and {rev} templatekw
1163 1176 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1164 1177
1165 1178 if self.ui.quiet:
1166 1179 self.ui.write("%d:%s\n" % revnode, label='log.node')
1167 1180 return
1168 1181
1169 1182 date = util.datestr(ctx.date())
1170 1183
1171 1184 # i18n: column positioning for "hg log"
1172 1185 self.ui.write(_("changeset: %d:%s\n") % revnode,
1173 1186 label='log.changeset changeset.%s' % ctx.phasestr())
1174 1187
1175 1188 # branches are shown first before any other names due to backwards
1176 1189 # compatibility
1177 1190 branch = ctx.branch()
1178 1191 # don't show the default branch name
1179 1192 if branch != 'default':
1180 1193 # i18n: column positioning for "hg log"
1181 1194 self.ui.write(_("branch: %s\n") % branch,
1182 1195 label='log.branch')
1183 1196
1184 1197 for name, ns in self.repo.names.iteritems():
1185 1198 # branches has special logic already handled above, so here we just
1186 1199 # skip it
1187 1200 if name == 'branches':
1188 1201 continue
1189 1202 # we will use the templatename as the color name since those two
1190 1203 # should be the same
1191 1204 for name in ns.names(self.repo, changenode):
1192 1205 self.ui.write(ns.logfmt % name,
1193 1206 label='log.%s' % ns.colorname)
1194 1207 if self.ui.debugflag:
1195 1208 # i18n: column positioning for "hg log"
1196 1209 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1197 1210 label='log.phase')
1198 1211 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1199 1212 label = 'log.parent changeset.%s' % pctx.phasestr()
1200 1213 # i18n: column positioning for "hg log"
1201 1214 self.ui.write(_("parent: %d:%s\n")
1202 1215 % (pctx.rev(), hexfunc(pctx.node())),
1203 1216 label=label)
1204 1217
1205 1218 if self.ui.debugflag and rev is not None:
1206 1219 mnode = ctx.manifestnode()
1207 1220 # i18n: column positioning for "hg log"
1208 1221 self.ui.write(_("manifest: %d:%s\n") %
1209 1222 (self.repo.manifest.rev(mnode), hex(mnode)),
1210 1223 label='ui.debug log.manifest')
1211 1224 # i18n: column positioning for "hg log"
1212 1225 self.ui.write(_("user: %s\n") % ctx.user(),
1213 1226 label='log.user')
1214 1227 # i18n: column positioning for "hg log"
1215 1228 self.ui.write(_("date: %s\n") % date,
1216 1229 label='log.date')
1217 1230
1218 1231 if self.ui.debugflag:
1219 1232 files = ctx.p1().status(ctx)[:3]
1220 1233 for key, value in zip([# i18n: column positioning for "hg log"
1221 1234 _("files:"),
1222 1235 # i18n: column positioning for "hg log"
1223 1236 _("files+:"),
1224 1237 # i18n: column positioning for "hg log"
1225 1238 _("files-:")], files):
1226 1239 if value:
1227 1240 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1228 1241 label='ui.debug log.files')
1229 1242 elif ctx.files() and self.ui.verbose:
1230 1243 # i18n: column positioning for "hg log"
1231 1244 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1232 1245 label='ui.note log.files')
1233 1246 if copies and self.ui.verbose:
1234 1247 copies = ['%s (%s)' % c for c in copies]
1235 1248 # i18n: column positioning for "hg log"
1236 1249 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1237 1250 label='ui.note log.copies')
1238 1251
1239 1252 extra = ctx.extra()
1240 1253 if extra and self.ui.debugflag:
1241 1254 for key, value in sorted(extra.items()):
1242 1255 # i18n: column positioning for "hg log"
1243 1256 self.ui.write(_("extra: %s=%s\n")
1244 1257 % (key, value.encode('string_escape')),
1245 1258 label='ui.debug log.extra')
1246 1259
1247 1260 description = ctx.description().strip()
1248 1261 if description:
1249 1262 if self.ui.verbose:
1250 1263 self.ui.write(_("description:\n"),
1251 1264 label='ui.note log.description')
1252 1265 self.ui.write(description,
1253 1266 label='ui.note log.description')
1254 1267 self.ui.write("\n\n")
1255 1268 else:
1256 1269 # i18n: column positioning for "hg log"
1257 1270 self.ui.write(_("summary: %s\n") %
1258 1271 description.splitlines()[0],
1259 1272 label='log.summary')
1260 1273 self.ui.write("\n")
1261 1274
1262 1275 self.showpatch(changenode, matchfn)
1263 1276
1264 1277 def showpatch(self, node, matchfn):
1265 1278 if not matchfn:
1266 1279 matchfn = self.matchfn
1267 1280 if matchfn:
1268 1281 stat = self.diffopts.get('stat')
1269 1282 diff = self.diffopts.get('patch')
1270 1283 diffopts = patch.diffallopts(self.ui, self.diffopts)
1271 1284 prev = self.repo.changelog.parents(node)[0]
1272 1285 if stat:
1273 1286 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1274 1287 match=matchfn, stat=True)
1275 1288 if diff:
1276 1289 if stat:
1277 1290 self.ui.write("\n")
1278 1291 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1279 1292 match=matchfn, stat=False)
1280 1293 self.ui.write("\n")
1281 1294
1282 1295 class jsonchangeset(changeset_printer):
1283 1296 '''format changeset information.'''
1284 1297
1285 1298 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1286 1299 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1287 1300 self.cache = {}
1288 1301 self._first = True
1289 1302
1290 1303 def close(self):
1291 1304 if not self._first:
1292 1305 self.ui.write("\n]\n")
1293 1306 else:
1294 1307 self.ui.write("[]\n")
1295 1308
1296 1309 def _show(self, ctx, copies, matchfn, props):
1297 1310 '''show a single changeset or file revision'''
1298 1311 rev = ctx.rev()
1299 1312 if rev is None:
1300 1313 jrev = jnode = 'null'
1301 1314 else:
1302 1315 jrev = str(rev)
1303 1316 jnode = '"%s"' % hex(ctx.node())
1304 1317 j = encoding.jsonescape
1305 1318
1306 1319 if self._first:
1307 1320 self.ui.write("[\n {")
1308 1321 self._first = False
1309 1322 else:
1310 1323 self.ui.write(",\n {")
1311 1324
1312 1325 if self.ui.quiet:
1313 1326 self.ui.write('\n "rev": %s' % jrev)
1314 1327 self.ui.write(',\n "node": %s' % jnode)
1315 1328 self.ui.write('\n }')
1316 1329 return
1317 1330
1318 1331 self.ui.write('\n "rev": %s' % jrev)
1319 1332 self.ui.write(',\n "node": %s' % jnode)
1320 1333 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1321 1334 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1322 1335 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1323 1336 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1324 1337 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1325 1338
1326 1339 self.ui.write(',\n "bookmarks": [%s]' %
1327 1340 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1328 1341 self.ui.write(',\n "tags": [%s]' %
1329 1342 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1330 1343 self.ui.write(',\n "parents": [%s]' %
1331 1344 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1332 1345
1333 1346 if self.ui.debugflag:
1334 1347 if rev is None:
1335 1348 jmanifestnode = 'null'
1336 1349 else:
1337 1350 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1338 1351 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1339 1352
1340 1353 self.ui.write(',\n "extra": {%s}' %
1341 1354 ", ".join('"%s": "%s"' % (j(k), j(v))
1342 1355 for k, v in ctx.extra().items()))
1343 1356
1344 1357 files = ctx.p1().status(ctx)
1345 1358 self.ui.write(',\n "modified": [%s]' %
1346 1359 ", ".join('"%s"' % j(f) for f in files[0]))
1347 1360 self.ui.write(',\n "added": [%s]' %
1348 1361 ", ".join('"%s"' % j(f) for f in files[1]))
1349 1362 self.ui.write(',\n "removed": [%s]' %
1350 1363 ", ".join('"%s"' % j(f) for f in files[2]))
1351 1364
1352 1365 elif self.ui.verbose:
1353 1366 self.ui.write(',\n "files": [%s]' %
1354 1367 ", ".join('"%s"' % j(f) for f in ctx.files()))
1355 1368
1356 1369 if copies:
1357 1370 self.ui.write(',\n "copies": {%s}' %
1358 1371 ", ".join('"%s": "%s"' % (j(k), j(v))
1359 1372 for k, v in copies))
1360 1373
1361 1374 matchfn = self.matchfn
1362 1375 if matchfn:
1363 1376 stat = self.diffopts.get('stat')
1364 1377 diff = self.diffopts.get('patch')
1365 1378 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1366 1379 node, prev = ctx.node(), ctx.p1().node()
1367 1380 if stat:
1368 1381 self.ui.pushbuffer()
1369 1382 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1370 1383 match=matchfn, stat=True)
1371 1384 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1372 1385 if diff:
1373 1386 self.ui.pushbuffer()
1374 1387 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1375 1388 match=matchfn, stat=False)
1376 1389 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1377 1390
1378 1391 self.ui.write("\n }")
1379 1392
1380 1393 class changeset_templater(changeset_printer):
1381 1394 '''format changeset information.'''
1382 1395
1383 1396 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1384 1397 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1385 1398 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1386 1399 defaulttempl = {
1387 1400 'parent': '{rev}:{node|formatnode} ',
1388 1401 'manifest': '{rev}:{node|formatnode}',
1389 1402 'file_copy': '{name} ({source})',
1390 1403 'extra': '{key}={value|stringescape}'
1391 1404 }
1392 1405 # filecopy is preserved for compatibility reasons
1393 1406 defaulttempl['filecopy'] = defaulttempl['file_copy']
1394 1407 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1395 1408 cache=defaulttempl)
1396 1409 if tmpl:
1397 1410 self.t.cache['changeset'] = tmpl
1398 1411
1399 1412 self.cache = {}
1400 1413
1401 1414 # find correct templates for current mode
1402 1415 tmplmodes = [
1403 1416 (True, None),
1404 1417 (self.ui.verbose, 'verbose'),
1405 1418 (self.ui.quiet, 'quiet'),
1406 1419 (self.ui.debugflag, 'debug'),
1407 1420 ]
1408 1421
1409 1422 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1410 1423 'docheader': '', 'docfooter': ''}
1411 1424 for mode, postfix in tmplmodes:
1412 1425 for t in self._parts:
1413 1426 cur = t
1414 1427 if postfix:
1415 1428 cur += "_" + postfix
1416 1429 if mode and cur in self.t:
1417 1430 self._parts[t] = cur
1418 1431
1419 1432 if self._parts['docheader']:
1420 1433 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1421 1434
1422 1435 def close(self):
1423 1436 if self._parts['docfooter']:
1424 1437 if not self.footer:
1425 1438 self.footer = ""
1426 1439 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1427 1440 return super(changeset_templater, self).close()
1428 1441
1429 1442 def _show(self, ctx, copies, matchfn, props):
1430 1443 '''show a single changeset or file revision'''
1431 1444 props = props.copy()
1432 1445 props.update(templatekw.keywords)
1433 1446 props['templ'] = self.t
1434 1447 props['ctx'] = ctx
1435 1448 props['repo'] = self.repo
1436 1449 props['revcache'] = {'copies': copies}
1437 1450 props['cache'] = self.cache
1438 1451
1439 1452 try:
1440 1453 # write header
1441 1454 if self._parts['header']:
1442 1455 h = templater.stringify(self.t(self._parts['header'], **props))
1443 1456 if self.buffered:
1444 1457 self.header[ctx.rev()] = h
1445 1458 else:
1446 1459 if self.lastheader != h:
1447 1460 self.lastheader = h
1448 1461 self.ui.write(h)
1449 1462
1450 1463 # write changeset metadata, then patch if requested
1451 1464 key = self._parts['changeset']
1452 1465 self.ui.write(templater.stringify(self.t(key, **props)))
1453 1466 self.showpatch(ctx.node(), matchfn)
1454 1467
1455 1468 if self._parts['footer']:
1456 1469 if not self.footer:
1457 1470 self.footer = templater.stringify(
1458 1471 self.t(self._parts['footer'], **props))
1459 1472 except KeyError as inst:
1460 1473 msg = _("%s: no key named '%s'")
1461 1474 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1462 1475 except SyntaxError as inst:
1463 1476 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1464 1477
1465 1478 def gettemplate(ui, tmpl, style):
1466 1479 """
1467 1480 Find the template matching the given template spec or style.
1468 1481 """
1469 1482
1470 1483 # ui settings
1471 1484 if not tmpl and not style: # template is stronger than style
1472 1485 tmpl = ui.config('ui', 'logtemplate')
1473 1486 if tmpl:
1474 1487 try:
1475 1488 tmpl = templater.unquotestring(tmpl)
1476 1489 except SyntaxError:
1477 1490 pass
1478 1491 return tmpl, None
1479 1492 else:
1480 1493 style = util.expandpath(ui.config('ui', 'style', ''))
1481 1494
1482 1495 if not tmpl and style:
1483 1496 mapfile = style
1484 1497 if not os.path.split(mapfile)[0]:
1485 1498 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1486 1499 or templater.templatepath(mapfile))
1487 1500 if mapname:
1488 1501 mapfile = mapname
1489 1502 return None, mapfile
1490 1503
1491 1504 if not tmpl:
1492 1505 return None, None
1493 1506
1494 1507 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1495 1508
1496 1509 def show_changeset(ui, repo, opts, buffered=False):
1497 1510 """show one changeset using template or regular display.
1498 1511
1499 1512 Display format will be the first non-empty hit of:
1500 1513 1. option 'template'
1501 1514 2. option 'style'
1502 1515 3. [ui] setting 'logtemplate'
1503 1516 4. [ui] setting 'style'
1504 1517 If all of these values are either unset or the empty string,
1505 1518 regular display via changeset_printer() is done.
1506 1519 """
1507 1520 # options
1508 1521 matchfn = None
1509 1522 if opts.get('patch') or opts.get('stat'):
1510 1523 matchfn = scmutil.matchall(repo)
1511 1524
1512 1525 if opts.get('template') == 'json':
1513 1526 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1514 1527
1515 1528 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1516 1529
1517 1530 if not tmpl and not mapfile:
1518 1531 return changeset_printer(ui, repo, matchfn, opts, buffered)
1519 1532
1520 1533 try:
1521 1534 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1522 1535 buffered)
1523 1536 except SyntaxError as inst:
1524 1537 raise util.Abort(inst.args[0])
1525 1538 return t
1526 1539
1527 1540 def showmarker(ui, marker):
1528 1541 """utility function to display an obsolescence marker in a readable way
1529 1542
1530 1543 To be used by debug function."""
1531 1544 ui.write(hex(marker.precnode()))
1532 1545 for repl in marker.succnodes():
1533 1546 ui.write(' ')
1534 1547 ui.write(hex(repl))
1535 1548 ui.write(' %X ' % marker.flags())
1536 1549 parents = marker.parentnodes()
1537 1550 if parents is not None:
1538 1551 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1539 1552 ui.write('(%s) ' % util.datestr(marker.date()))
1540 1553 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1541 1554 sorted(marker.metadata().items())
1542 1555 if t[0] != 'date')))
1543 1556 ui.write('\n')
1544 1557
1545 1558 def finddate(ui, repo, date):
1546 1559 """Find the tipmost changeset that matches the given date spec"""
1547 1560
1548 1561 df = util.matchdate(date)
1549 1562 m = scmutil.matchall(repo)
1550 1563 results = {}
1551 1564
1552 1565 def prep(ctx, fns):
1553 1566 d = ctx.date()
1554 1567 if df(d[0]):
1555 1568 results[ctx.rev()] = d
1556 1569
1557 1570 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1558 1571 rev = ctx.rev()
1559 1572 if rev in results:
1560 1573 ui.status(_("found revision %s from %s\n") %
1561 1574 (rev, util.datestr(results[rev])))
1562 1575 return str(rev)
1563 1576
1564 1577 raise util.Abort(_("revision matching date not found"))
1565 1578
1566 1579 def increasingwindows(windowsize=8, sizelimit=512):
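# yield window sizes 8, 16, 32, ... doubling until 'sizelimit' is reached,
# then keep yielding 'sizelimit' indefinitely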
1567 1580 while True:
1568 1581 yield windowsize
1569 1582 if windowsize < sizelimit:
1570 1583 windowsize *= 2
1571 1584
1572 1585 class FileWalkError(Exception):
1573 1586 pass
1574 1587
1575 1588 def walkfilerevs(repo, match, follow, revs, fncache):
1576 1589 '''Walks the file history for the matched files.
1577 1590
1578 1591 Returns the changeset revs that are involved in the file history.
1579 1592
1580 1593 Throws FileWalkError if the file history can't be walked using
1581 1594 filelogs alone.
1582 1595 '''
1583 1596 wanted = set()
1584 1597 copies = []
1585 1598 minrev, maxrev = min(revs), max(revs)
1586 1599 def filerevgen(filelog, last):
1587 1600 """
1588 1601 Only files, no patterns. Check the history of each file.
1589 1602
1590 1603 Examines filelog entries within the minrev/maxrev linkrev range and
1591 1604 returns an iterator yielding (linkrev, parentlinkrevs, copied)
1592 1605 tuples in backwards order.
1593 1606 """
1594 1607 cl_count = len(repo)
1595 1608 revs = []
1596 1609 for j in xrange(0, last + 1):
1597 1610 linkrev = filelog.linkrev(j)
1598 1611 if linkrev < minrev:
1599 1612 continue
1600 1613 # only yield revs for which we have the changelog entry; it can
1601 1614 # be missing while doing "hg log" during a pull or commit
1602 1615 if linkrev >= cl_count:
1603 1616 break
1604 1617
1605 1618 parentlinkrevs = []
1606 1619 for p in filelog.parentrevs(j):
1607 1620 if p != nullrev:
1608 1621 parentlinkrevs.append(filelog.linkrev(p))
1609 1622 n = filelog.node(j)
1610 1623 revs.append((linkrev, parentlinkrevs,
1611 1624 follow and filelog.renamed(n)))
1612 1625
1613 1626 return reversed(revs)
1614 1627 def iterfiles():
1615 1628 pctx = repo['.']
1616 1629 for filename in match.files():
1617 1630 if follow:
1618 1631 if filename not in pctx:
1619 1632 raise util.Abort(_('cannot follow file not in parent '
1620 1633 'revision: "%s"') % filename)
1621 1634 yield filename, pctx[filename].filenode()
1622 1635 else:
1623 1636 yield filename, None
1624 1637 for filename_node in copies:
1625 1638 yield filename_node
1626 1639
1627 1640 for file_, node in iterfiles():
1628 1641 filelog = repo.file(file_)
1629 1642 if not len(filelog):
1630 1643 if node is None:
1631 1644 # A zero count may be a directory or deleted file, so
1632 1645 # try to find matching entries on the slow path.
1633 1646 if follow:
1634 1647 raise util.Abort(
1635 1648 _('cannot follow nonexistent file: "%s"') % file_)
1636 1649 raise FileWalkError("Cannot walk via filelog")
1637 1650 else:
1638 1651 continue
1639 1652
1640 1653 if node is None:
1641 1654 last = len(filelog) - 1
1642 1655 else:
1643 1656 last = filelog.rev(node)
1644 1657
1645 1658 # keep track of all ancestors of the file
1646 1659 ancestors = set([filelog.linkrev(last)])
1647 1660
1648 1661 # iterate from latest to oldest revision
1649 1662 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1650 1663 if not follow:
1651 1664 if rev > maxrev:
1652 1665 continue
1653 1666 else:
1654 1667 # Note that last might not be the first interesting
1655 1668 # rev to us:
1656 1669 # if the file has been changed after maxrev, we'll
1657 1670 # have linkrev(last) > maxrev, and we still need
1658 1671 # to explore the file graph
1659 1672 if rev not in ancestors:
1660 1673 continue
1661 1674 # XXX insert 1327 fix here
1662 1675 if flparentlinkrevs:
1663 1676 ancestors.update(flparentlinkrevs)
1664 1677
1665 1678 fncache.setdefault(rev, []).append(file_)
1666 1679 wanted.add(rev)
1667 1680 if copied:
1668 1681 copies.append(copied)
1669 1682
1670 1683 return wanted
1671 1684
1672 1685 class _followfilter(object):
1673 1686 def __init__(self, repo, onlyfirst=False):
1674 1687 self.repo = repo
1675 1688 self.startrev = nullrev
1676 1689 self.roots = set()
1677 1690 self.onlyfirst = onlyfirst
1678 1691
1679 1692 def match(self, rev):
1680 1693 def realparents(rev):
1681 1694 if self.onlyfirst:
1682 1695 return self.repo.changelog.parentrevs(rev)[0:1]
1683 1696 else:
1684 1697 return filter(lambda x: x != nullrev,
1685 1698 self.repo.changelog.parentrevs(rev))
1686 1699
1687 1700 if self.startrev == nullrev:
1688 1701 self.startrev = rev
1689 1702 return True
1690 1703
1691 1704 if rev > self.startrev:
1692 1705 # forward: all descendants
1693 1706 if not self.roots:
1694 1707 self.roots.add(self.startrev)
1695 1708 for parent in realparents(rev):
1696 1709 if parent in self.roots:
1697 1710 self.roots.add(rev)
1698 1711 return True
1699 1712 else:
1700 1713 # backwards: all parents
1701 1714 if not self.roots:
1702 1715 self.roots.update(realparents(self.startrev))
1703 1716 if rev in self.roots:
1704 1717 self.roots.remove(rev)
1705 1718 self.roots.update(realparents(rev))
1706 1719 return True
1707 1720
1708 1721 return False
1709 1722
1710 1723 def walkchangerevs(repo, match, opts, prepare):
1711 1724 '''Iterate over files and the revs in which they changed.
1712 1725
1713 1726 Callers most commonly need to iterate backwards over the history
1714 1727 in which they are interested. Doing so has awful (quadratic-looking)
1715 1728 performance, so we use iterators in a "windowed" way.
1716 1729
1717 1730 We walk a window of revisions in the desired order. Within the
1718 1731 window, we first walk forwards to gather data, then in the desired
1719 1732 order (usually backwards) to display it.
1720 1733
1721 1734 This function returns an iterator yielding contexts. Before
1722 1735 yielding each context, the iterator will first call the prepare
1723 1736 function on each context in the window in forward order.'''
1724 1737
1725 1738 follow = opts.get('follow') or opts.get('follow_first')
1726 1739 revs = _logrevs(repo, opts)
1727 1740 if not revs:
1728 1741 return []
1729 1742 wanted = set()
1730 1743 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1731 1744 opts.get('removed'))
1732 1745 fncache = {}
1733 1746 change = repo.changectx
1734 1747
1735 1748 # First step is to fill wanted, the set of revisions that we want to yield.
1736 1749 # When it does not induce extra cost, we also fill fncache for revisions in
1737 1750 # wanted: a cache of filenames that were changed (ctx.files()) and that
1738 1751 # match the file filtering conditions.
1739 1752
1740 1753 if match.always():
1741 1754 # No files, no patterns. Display all revs.
1742 1755 wanted = revs
1743 1756 elif not slowpath:
1744 1757 # We only have to read through the filelog to find wanted revisions
1745 1758
1746 1759 try:
1747 1760 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1748 1761 except FileWalkError:
1749 1762 slowpath = True
1750 1763
1751 1764 # We decided to fall back to the slowpath because at least one
1752 1765 # of the paths was not a file. Check to see if at least one of them
1753 1766 # existed in history, otherwise simply return
1754 1767 for path in match.files():
1755 1768 if path == '.' or path in repo.store:
1756 1769 break
1757 1770 else:
1758 1771 return []
1759 1772
1760 1773 if slowpath:
1761 1774 # We have to read the changelog to match filenames against
1762 1775 # changed files
1763 1776
1764 1777 if follow:
1765 1778 raise util.Abort(_('can only follow copies/renames for explicit '
1766 1779 'filenames'))
1767 1780
1768 1781 # The slow path checks files modified in every changeset.
1769 1782 # This is really slow on large repos, so compute the set lazily.
1770 1783 class lazywantedset(object):
1771 1784 def __init__(self):
1772 1785 self.set = set()
1773 1786 self.revs = set(revs)
1774 1787
1775 1788 # No need to worry about locality here because it will be accessed
1776 1789 # in the same order as the increasing window below.
1777 1790 def __contains__(self, value):
1778 1791 if value in self.set:
1779 1792 return True
1780 1793 elif not value in self.revs:
1781 1794 return False
1782 1795 else:
1783 1796 self.revs.discard(value)
1784 1797 ctx = change(value)
1785 1798 matches = filter(match, ctx.files())
1786 1799 if matches:
1787 1800 fncache[value] = matches
1788 1801 self.set.add(value)
1789 1802 return True
1790 1803 return False
1791 1804
1792 1805 def discard(self, value):
1793 1806 self.revs.discard(value)
1794 1807 self.set.discard(value)
1795 1808
1796 1809 wanted = lazywantedset()
1797 1810
1798 1811 # it might be worthwhile to do this in the iterator if the rev range
1799 1812 # is descending and the prune args are all within that range
1800 1813 for rev in opts.get('prune', ()):
1801 1814 rev = repo[rev].rev()
1802 1815 ff = _followfilter(repo)
1803 1816 stop = min(revs[0], revs[-1])
1804 1817 for x in xrange(rev, stop - 1, -1):
1805 1818 if ff.match(x):
1806 1819 wanted = wanted - [x]
1807 1820
1808 1821 # Now that wanted is correctly initialized, we can iterate over the
1809 1822 # revision range, yielding only revisions in wanted.
1810 1823 def iterate():
1811 1824 if follow and match.always():
1812 1825 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1813 1826 def want(rev):
1814 1827 return ff.match(rev) and rev in wanted
1815 1828 else:
1816 1829 def want(rev):
1817 1830 return rev in wanted
1818 1831
1819 1832 it = iter(revs)
1820 1833 stopiteration = False
1821 1834 for windowsize in increasingwindows():
1822 1835 nrevs = []
1823 1836 for i in xrange(windowsize):
1824 1837 rev = next(it, None)
1825 1838 if rev is None:
1826 1839 stopiteration = True
1827 1840 break
1828 1841 elif want(rev):
1829 1842 nrevs.append(rev)
1830 1843 for rev in sorted(nrevs):
1831 1844 fns = fncache.get(rev)
1832 1845 ctx = change(rev)
1833 1846 if not fns:
1834 1847 def fns_generator():
1835 1848 for f in ctx.files():
1836 1849 if match(f):
1837 1850 yield f
1838 1851 fns = fns_generator()
1839 1852 prepare(ctx, fns)
1840 1853 for rev in nrevs:
1841 1854 yield change(rev)
1842 1855
1843 1856 if stopiteration:
1844 1857 break
1845 1858
1846 1859 return iterate()
1847 1860
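# Illustrative sketch (not part of the original module): a minimal caller of
# walkchangerevs() in the same spirit as finddate() above, collecting the
# revisions that touch files matched by 'm'. The empty 'rev' option requests
# the default revision range.
def _examplewalk(ui, repo, m):
    hits = []
    def prep(ctx, fns):
        # called on every context of a window, in forward order, before the
        # contexts are yielded in the requested order
        hits.append(ctx.rev())
    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        ui.note(_('visiting revision %d\n') % ctx.rev())
    return hits
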
1848 1861 def _makefollowlogfilematcher(repo, files, followfirst):
1849 1862 # When displaying a revision with --patch --follow FILE, we have
1850 1863 # to know which file of the revision must be diffed. With
1851 1864 # --follow, we want the names of the ancestors of FILE in the
1852 1865 # revision, stored in "fcache". "fcache" is populated by
1853 1866 # reproducing the graph traversal already done by --follow revset
1854 1867 # and relating linkrevs to file names (which is not "correct" but
1855 1868 # good enough).
1856 1869 fcache = {}
1857 1870 fcacheready = [False]
1858 1871 pctx = repo['.']
1859 1872
1860 1873 def populate():
1861 1874 for fn in files:
1862 1875 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1863 1876 for c in i:
1864 1877 fcache.setdefault(c.linkrev(), set()).add(c.path())
1865 1878
1866 1879 def filematcher(rev):
1867 1880 if not fcacheready[0]:
1868 1881 # Lazy initialization
1869 1882 fcacheready[0] = True
1870 1883 populate()
1871 1884 return scmutil.matchfiles(repo, fcache.get(rev, []))
1872 1885
1873 1886 return filematcher
1874 1887
1875 1888 def _makenofollowlogfilematcher(repo, pats, opts):
1876 1889 '''hook for extensions to override the filematcher for non-follow cases'''
1877 1890 return None
1878 1891
1879 1892 def _makelogrevset(repo, pats, opts, revs):
1880 1893 """Return (expr, filematcher) where expr is a revset string built
1881 1894 from log options and file patterns or None. If --stat or --patch
1882 1895 are not passed filematcher is None. Otherwise it is a callable
1883 1896 taking a revision number and returning a match objects filtering
1884 1897 the files to be detailed when displaying the revision.
1885 1898 """
1886 1899 opt2revset = {
1887 1900 'no_merges': ('not merge()', None),
1888 1901 'only_merges': ('merge()', None),
1889 1902 '_ancestors': ('ancestors(%(val)s)', None),
1890 1903 '_fancestors': ('_firstancestors(%(val)s)', None),
1891 1904 '_descendants': ('descendants(%(val)s)', None),
1892 1905 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1893 1906 '_matchfiles': ('_matchfiles(%(val)s)', None),
1894 1907 'date': ('date(%(val)r)', None),
1895 1908 'branch': ('branch(%(val)r)', ' or '),
1896 1909 '_patslog': ('filelog(%(val)r)', ' or '),
1897 1910 '_patsfollow': ('follow(%(val)r)', ' or '),
1898 1911 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1899 1912 'keyword': ('keyword(%(val)r)', ' or '),
1900 1913 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1901 1914 'user': ('user(%(val)r)', ' or '),
1902 1915 }
1903 1916
1904 1917 opts = dict(opts)
1905 1918 # follow or not follow?
1906 1919 follow = opts.get('follow') or opts.get('follow_first')
1907 1920 if opts.get('follow_first'):
1908 1921 followfirst = 1
1909 1922 else:
1910 1923 followfirst = 0
1911 1924 # --follow with FILE behavior depends on revs...
1912 1925 it = iter(revs)
1913 1926 startrev = it.next()
1914 1927 followdescendants = startrev < next(it, startrev)
1915 1928
1916 1929 # branch and only_branch are really aliases and must be handled at
1917 1930 # the same time
1918 1931 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1919 1932 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1920 1933 # pats/include/exclude are passed to match.match() directly in
1921 1934 # _matchfiles() revset but walkchangerevs() builds its matcher with
1922 1935 # scmutil.match(). The difference is input pats are globbed on
1923 1936 # platforms without shell expansion (windows).
1924 1937 wctx = repo[None]
1925 1938 match, pats = scmutil.matchandpats(wctx, pats, opts)
1926 1939 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1927 1940 opts.get('removed'))
1928 1941 if not slowpath:
1929 1942 for f in match.files():
1930 1943 if follow and f not in wctx:
1931 1944 # If the file exists, it may be a directory, so let it
1932 1945 # take the slow path.
1933 1946 if os.path.exists(repo.wjoin(f)):
1934 1947 slowpath = True
1935 1948 continue
1936 1949 else:
1937 1950 raise util.Abort(_('cannot follow file not in parent '
1938 1951 'revision: "%s"') % f)
1939 1952 filelog = repo.file(f)
1940 1953 if not filelog:
1941 1954 # A zero count may be a directory or deleted file, so
1942 1955 # try to find matching entries on the slow path.
1943 1956 if follow:
1944 1957 raise util.Abort(
1945 1958 _('cannot follow nonexistent file: "%s"') % f)
1946 1959 slowpath = True
1947 1960
1948 1961 # We decided to fall back to the slowpath because at least one
1949 1962 # of the paths was not a file. Check to see if at least one of them
1950 1963 # existed in history - in that case, we'll continue down the
1951 1964 # slowpath; otherwise, we can turn off the slowpath
1952 1965 if slowpath:
1953 1966 for path in match.files():
1954 1967 if path == '.' or path in repo.store:
1955 1968 break
1956 1969 else:
1957 1970 slowpath = False
1958 1971
1959 1972 fpats = ('_patsfollow', '_patsfollowfirst')
1960 1973 fnopats = (('_ancestors', '_fancestors'),
1961 1974 ('_descendants', '_fdescendants'))
1962 1975 if slowpath:
1963 1976 # See walkchangerevs() slow path.
1964 1977 #
1965 1978 # pats/include/exclude cannot be represented as separate
1966 1979 # revset expressions as their filtering logic applies at file
1967 1980 # level. For instance "-I a -X a" matches a revision touching
1968 1981 # "a" and "b" while "file(a) and not file(b)" does
1969 1982 # not. Besides, filesets are evaluated against the working
1970 1983 # directory.
1971 1984 matchargs = ['r:', 'd:relpath']
1972 1985 for p in pats:
1973 1986 matchargs.append('p:' + p)
1974 1987 for p in opts.get('include', []):
1975 1988 matchargs.append('i:' + p)
1976 1989 for p in opts.get('exclude', []):
1977 1990 matchargs.append('x:' + p)
1978 1991 matchargs = ','.join(('%r' % p) for p in matchargs)
1979 1992 opts['_matchfiles'] = matchargs
1980 1993 if follow:
1981 1994 opts[fnopats[0][followfirst]] = '.'
1982 1995 else:
1983 1996 if follow:
1984 1997 if pats:
1985 1998 # follow() revset interprets its file argument as a
1986 1999 # manifest entry, so use match.files(), not pats.
1987 2000 opts[fpats[followfirst]] = list(match.files())
1988 2001 else:
1989 2002 op = fnopats[followdescendants][followfirst]
1990 2003 opts[op] = 'rev(%d)' % startrev
1991 2004 else:
1992 2005 opts['_patslog'] = list(pats)
1993 2006
1994 2007 filematcher = None
1995 2008 if opts.get('patch') or opts.get('stat'):
1996 2009 # When following files, track renames via a special matcher.
1997 2010 # If we're forced to take the slowpath it means we're following
1998 2011 # at least one pattern/directory, so don't bother with rename tracking.
1999 2012 if follow and not match.always() and not slowpath:
2000 2013 # _makefollowlogfilematcher expects its files argument to be
2001 2014 # relative to the repo root, so use match.files(), not pats.
2002 2015 filematcher = _makefollowlogfilematcher(repo, match.files(),
2003 2016 followfirst)
2004 2017 else:
2005 2018 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2006 2019 if filematcher is None:
2007 2020 filematcher = lambda rev: match
2008 2021
2009 2022 expr = []
2010 2023 for op, val in sorted(opts.iteritems()):
2011 2024 if not val:
2012 2025 continue
2013 2026 if op not in opt2revset:
2014 2027 continue
2015 2028 revop, andor = opt2revset[op]
2016 2029 if '%(val)' not in revop:
2017 2030 expr.append(revop)
2018 2031 else:
2019 2032 if not isinstance(val, list):
2020 2033 e = revop % {'val': val}
2021 2034 else:
2022 2035 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2023 2036 expr.append(e)
2024 2037
2025 2038 if expr:
2026 2039 expr = '(' + ' and '.join(expr) + ')'
2027 2040 else:
2028 2041 expr = None
2029 2042 return expr, filematcher
2030 2043
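# Illustrative note (not part of the original module): for something like
# "hg log -u alice -k bug lib/util.py" the expression built by
# _makelogrevset() comes out roughly as
#   ((filelog('lib/util.py')) and (keyword('bug')) and (user('alice')))
# and the same file pattern feeds the filematcher that is returned when
# --patch or --stat is given.
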
2031 2044 def _logrevs(repo, opts):
2032 2045 # Default --rev value depends on --follow but --follow behavior
2033 2046 # depends on revisions resolved from --rev...
2034 2047 follow = opts.get('follow') or opts.get('follow_first')
2035 2048 if opts.get('rev'):
2036 2049 revs = scmutil.revrange(repo, opts['rev'])
2037 2050 elif follow and repo.dirstate.p1() == nullid:
2038 2051 revs = revset.baseset()
2039 2052 elif follow:
2040 2053 revs = repo.revs('reverse(:.)')
2041 2054 else:
2042 2055 revs = revset.spanset(repo)
2043 2056 revs.reverse()
2044 2057 return revs
2045 2058
2046 2059 def getgraphlogrevs(repo, pats, opts):
2047 2060 """Return (revs, expr, filematcher) where revs is an iterable of
2048 2061 revision numbers, expr is a revset string built from log options
2049 2062 and file patterns or None, and used to filter 'revs'. If --stat or
2050 2063 --patch are not passed, filematcher is None. Otherwise it is a
2051 2064 callable taking a revision number and returning a match object
2052 2065 filtering the files to be detailed when displaying the revision.
2053 2066 """
2054 2067 limit = loglimit(opts)
2055 2068 revs = _logrevs(repo, opts)
2056 2069 if not revs:
2057 2070 return revset.baseset(), None, None
2058 2071 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2059 2072 if opts.get('rev'):
2060 2073 # User-specified revs might be unsorted, but don't sort before
2061 2074 # _makelogrevset because it might depend on the order of revs
2062 2075 revs.sort(reverse=True)
2063 2076 if expr:
2064 2077 # Revset matchers often operate faster on revisions in changelog
2065 2078 # order, because most filters deal with the changelog.
2066 2079 revs.reverse()
2067 2080 matcher = revset.match(repo.ui, expr)
2068 2081 # Revset matches can reorder revisions. "A or B" typically
2069 2082 # returns the revision matching A then the revision matching B. Sort
2070 2083 # again to fix that.
2071 2084 revs = matcher(repo, revs)
2072 2085 revs.sort(reverse=True)
2073 2086 if limit is not None:
2074 2087 limitedrevs = []
2075 2088 for idx, rev in enumerate(revs):
2076 2089 if idx >= limit:
2077 2090 break
2078 2091 limitedrevs.append(rev)
2079 2092 revs = revset.baseset(limitedrevs)
2080 2093
2081 2094 return revs, expr, filematcher
2082 2095
2083 2096 def getlogrevs(repo, pats, opts):
2084 2097 """Return (revs, expr, filematcher) where revs is an iterable of
2085 2098 revision numbers, expr is a revset string built from log options
2086 2099 and file patterns or None, and used to filter 'revs'. If --stat or
2087 2100 --patch are not passed, filematcher is None. Otherwise it is a
2088 2101 callable taking a revision number and returning a match object
2089 2102 filtering the files to be detailed when displaying the revision.
2090 2103 """
2091 2104 limit = loglimit(opts)
2092 2105 revs = _logrevs(repo, opts)
2093 2106 if not revs:
2094 2107 return revset.baseset([]), None, None
2095 2108 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2096 2109 if expr:
2097 2110 # Revset matchers often operate faster on revisions in changelog
2098 2111 # order, because most filters deal with the changelog.
2099 2112 if not opts.get('rev'):
2100 2113 revs.reverse()
2101 2114 matcher = revset.match(repo.ui, expr)
2102 2115 # Revset matches can reorder revisions. "A or B" typically
2103 2116 # returns the revision matching A then the revision matching B. Sort
2104 2117 # again to fix that.
2105 2118 revs = matcher(repo, revs)
2106 2119 if not opts.get('rev'):
2107 2120 revs.sort(reverse=True)
2108 2121 if limit is not None:
2109 2122 limitedrevs = []
2110 2123 for idx, r in enumerate(revs):
2111 2124 if limit <= idx:
2112 2125 break
2113 2126 limitedrevs.append(r)
2114 2127 revs = revset.baseset(limitedrevs)
2115 2128
2116 2129 return revs, expr, filematcher
2117 2130
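# Illustrative sketch (not part of the original module): getlogrevs() and
# show_changeset() are the building blocks of "hg log"; a stripped-down log
# command could look roughly like this, with 'pats' and 'opts' being the usual
# command arguments.
def _examplelog(ui, repo, pats, opts):
    revs, expr, filematcher = getlogrevs(repo, pats, opts)
    displayer = show_changeset(ui, repo, opts, buffered=False)
    for rev in revs:
        ctx = repo[rev]
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(rev)
        displayer.show(ctx, matchfn=revmatchfn)
    displayer.close()
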
2118 2131 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
2119 2132 filematcher=None):
2120 2133 seen, state = [], graphmod.asciistate()
2121 2134 for rev, type, ctx, parents in dag:
2122 2135 char = 'o'
2123 2136 if ctx.node() in showparents:
2124 2137 char = '@'
2125 2138 elif ctx.obsolete():
2126 2139 char = 'x'
2127 2140 elif ctx.closesbranch():
2128 2141 char = '_'
2129 2142 copies = None
2130 2143 if getrenamed and ctx.rev():
2131 2144 copies = []
2132 2145 for fn in ctx.files():
2133 2146 rename = getrenamed(fn, ctx.rev())
2134 2147 if rename:
2135 2148 copies.append((fn, rename[0]))
2136 2149 revmatchfn = None
2137 2150 if filematcher is not None:
2138 2151 revmatchfn = filematcher(ctx.rev())
2139 2152 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2140 2153 lines = displayer.hunk.pop(rev).split('\n')
2141 2154 if not lines[-1]:
2142 2155 del lines[-1]
2143 2156 displayer.flush(ctx)
2144 2157 edges = edgefn(type, char, lines, seen, rev, parents)
2145 2158 for type, char, lines, coldata in edges:
2146 2159 graphmod.ascii(ui, state, type, char, lines, coldata)
2147 2160 displayer.close()
2148 2161
2149 2162 def graphlog(ui, repo, *pats, **opts):
2150 2163 # Parameters are identical to log command ones
2151 2164 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2152 2165 revdag = graphmod.dagwalker(repo, revs)
2153 2166
2154 2167 getrenamed = None
2155 2168 if opts.get('copies'):
2156 2169 endrev = None
2157 2170 if opts.get('rev'):
2158 2171 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2159 2172 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2160 2173 displayer = show_changeset(ui, repo, opts, buffered=True)
2161 2174 showparents = [ctx.node() for ctx in repo[None].parents()]
2162 2175 displaygraph(ui, revdag, displayer, showparents,
2163 2176 graphmod.asciiedges, getrenamed, filematcher)
2164 2177
2165 2178 def checkunsupportedgraphflags(pats, opts):
2166 2179 for op in ["newest_first"]:
2167 2180 if op in opts and opts[op]:
2168 2181 raise util.Abort(_("-G/--graph option is incompatible with --%s")
2169 2182 % op.replace("_", "-"))
2170 2183
2171 2184 def graphrevs(repo, nodes, opts):
2172 2185 limit = loglimit(opts)
2173 2186 nodes.reverse()
2174 2187 if limit is not None:
2175 2188 nodes = nodes[:limit]
2176 2189 return graphmod.nodes(repo, nodes)
2177 2190
2178 2191 def add(ui, repo, match, prefix, explicitonly, **opts):
2179 2192 join = lambda f: os.path.join(prefix, f)
2180 2193 bad = []
2181 2194
2182 2195 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2183 2196 names = []
2184 2197 wctx = repo[None]
2185 2198 cca = None
2186 2199 abort, warn = scmutil.checkportabilityalert(ui)
2187 2200 if abort or warn:
2188 2201 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2189 2202
2190 2203 badmatch = matchmod.badmatch(match, badfn)
2191 2204 dirstate = repo.dirstate
2192 2205 # We don't want to just call wctx.walk here, since it would return a lot of
2193 2206 # clean files, which we aren't interested in, and doing so takes time.
2194 2207 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2195 2208 True, False, full=False)):
2196 2209 exact = match.exact(f)
2197 2210 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2198 2211 if cca:
2199 2212 cca(f)
2200 2213 names.append(f)
2201 2214 if ui.verbose or not exact:
2202 2215 ui.status(_('adding %s\n') % match.rel(f))
2203 2216
2204 2217 for subpath in sorted(wctx.substate):
2205 2218 sub = wctx.sub(subpath)
2206 2219 try:
2207 2220 submatch = matchmod.narrowmatcher(subpath, match)
2208 2221 if opts.get('subrepos'):
2209 2222 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2210 2223 else:
2211 2224 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2212 2225 except error.LookupError:
2213 2226 ui.status(_("skipping missing subrepository: %s\n")
2214 2227 % join(subpath))
2215 2228
2216 2229 if not opts.get('dry_run'):
2217 2230 rejected = wctx.add(names, prefix)
2218 2231 bad.extend(f for f in rejected if f in match.files())
2219 2232 return bad
2220 2233
2221 2234 def forget(ui, repo, match, prefix, explicitonly):
2222 2235 join = lambda f: os.path.join(prefix, f)
2223 2236 bad = []
2224 2237 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2225 2238 wctx = repo[None]
2226 2239 forgot = []
2227 2240
2228 2241 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2229 2242 forget = sorted(s[0] + s[1] + s[3] + s[6])
2230 2243 if explicitonly:
2231 2244 forget = [f for f in forget if match.exact(f)]
2232 2245
2233 2246 for subpath in sorted(wctx.substate):
2234 2247 sub = wctx.sub(subpath)
2235 2248 try:
2236 2249 submatch = matchmod.narrowmatcher(subpath, match)
2237 2250 subbad, subforgot = sub.forget(submatch, prefix)
2238 2251 bad.extend([subpath + '/' + f for f in subbad])
2239 2252 forgot.extend([subpath + '/' + f for f in subforgot])
2240 2253 except error.LookupError:
2241 2254 ui.status(_("skipping missing subrepository: %s\n")
2242 2255 % join(subpath))
2243 2256
2244 2257 if not explicitonly:
2245 2258 for f in match.files():
2246 2259 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2247 2260 if f not in forgot:
2248 2261 if repo.wvfs.exists(f):
2249 2262 # Don't complain if the exact case match wasn't given.
2250 2263 # But don't do this until after checking 'forgot', so
2251 2264 # that subrepo files aren't normalized, and this op is
2252 2265 # purely from data cached by the status walk above.
2253 2266 if repo.dirstate.normalize(f) in repo.dirstate:
2254 2267 continue
2255 2268 ui.warn(_('not removing %s: '
2256 2269 'file is already untracked\n')
2257 2270 % match.rel(f))
2258 2271 bad.append(f)
2259 2272
2260 2273 for f in forget:
2261 2274 if ui.verbose or not match.exact(f):
2262 2275 ui.status(_('removing %s\n') % match.rel(f))
2263 2276
2264 2277 rejected = wctx.forget(forget, prefix)
2265 2278 bad.extend(f for f in rejected if f in match.files())
2266 2279 forgot.extend(f for f in forget if f not in rejected)
2267 2280 return bad, forgot
2268 2281
2269 2282 def files(ui, ctx, m, fm, fmt, subrepos):
2270 2283 rev = ctx.rev()
2271 2284 ret = 1
2272 2285 ds = ctx.repo().dirstate
2273 2286
2274 2287 for f in ctx.matches(m):
2275 2288 if rev is None and ds[f] == 'r':
2276 2289 continue
2277 2290 fm.startitem()
2278 2291 if ui.verbose:
2279 2292 fc = ctx[f]
2280 2293 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2281 2294 fm.data(abspath=f)
2282 2295 fm.write('path', fmt, m.rel(f))
2283 2296 ret = 0
2284 2297
2285 2298 for subpath in sorted(ctx.substate):
2286 2299 def matchessubrepo(subpath):
2287 2300 return (m.always() or m.exact(subpath)
2288 2301 or any(f.startswith(subpath + '/') for f in m.files()))
2289 2302
2290 2303 if subrepos or matchessubrepo(subpath):
2291 2304 sub = ctx.sub(subpath)
2292 2305 try:
2293 2306 submatch = matchmod.narrowmatcher(subpath, m)
2294 2307 if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
2295 2308 ret = 0
2296 2309 except error.LookupError:
2297 2310 ui.status(_("skipping missing subrepository: %s\n")
2298 2311 % m.abs(subpath))
2299 2312
2300 2313 return ret
2301 2314
2302 2315 def remove(ui, repo, m, prefix, after, force, subrepos):
2303 2316 join = lambda f: os.path.join(prefix, f)
2304 2317 ret = 0
2305 2318 s = repo.status(match=m, clean=True)
2306 2319 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2307 2320
2308 2321 wctx = repo[None]
2309 2322
2310 2323 for subpath in sorted(wctx.substate):
2311 2324 def matchessubrepo(matcher, subpath):
2312 2325 if matcher.exact(subpath):
2313 2326 return True
2314 2327 for f in matcher.files():
2315 2328 if f.startswith(subpath):
2316 2329 return True
2317 2330 return False
2318 2331
2319 2332 if subrepos or matchessubrepo(m, subpath):
2320 2333 sub = wctx.sub(subpath)
2321 2334 try:
2322 2335 submatch = matchmod.narrowmatcher(subpath, m)
2323 2336 if sub.removefiles(submatch, prefix, after, force, subrepos):
2324 2337 ret = 1
2325 2338 except error.LookupError:
2326 2339 ui.status(_("skipping missing subrepository: %s\n")
2327 2340 % join(subpath))
2328 2341
2329 2342 # warn about failure to delete explicit files/dirs
2330 2343 deleteddirs = util.dirs(deleted)
2331 2344 for f in m.files():
2332 2345 def insubrepo():
2333 2346 for subpath in wctx.substate:
2334 2347 if f.startswith(subpath):
2335 2348 return True
2336 2349 return False
2337 2350
2338 2351 isdir = f in deleteddirs or wctx.hasdir(f)
2339 2352 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2340 2353 continue
2341 2354
2342 2355 if repo.wvfs.exists(f):
2343 2356 if repo.wvfs.isdir(f):
2344 2357 ui.warn(_('not removing %s: no tracked files\n')
2345 2358 % m.rel(f))
2346 2359 else:
2347 2360 ui.warn(_('not removing %s: file is untracked\n')
2348 2361 % m.rel(f))
2349 2362 # missing files will generate a warning elsewhere
2350 2363 ret = 1
2351 2364
2352 2365 if force:
2353 2366 list = modified + deleted + clean + added
2354 2367 elif after:
2355 2368 list = deleted
2356 2369 for f in modified + added + clean:
2357 2370 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2358 2371 ret = 1
2359 2372 else:
2360 2373 list = deleted + clean
2361 2374 for f in modified:
2362 2375 ui.warn(_('not removing %s: file is modified (use -f'
2363 2376 ' to force removal)\n') % m.rel(f))
2364 2377 ret = 1
2365 2378 for f in added:
2366 2379 ui.warn(_('not removing %s: file has been marked for add'
2367 2380 ' (use forget to undo)\n') % m.rel(f))
2368 2381 ret = 1
2369 2382
2370 2383 for f in sorted(list):
2371 2384 if ui.verbose or not m.exact(f):
2372 2385 ui.status(_('removing %s\n') % m.rel(f))
2373 2386
2374 2387 wlock = repo.wlock()
2375 2388 try:
2376 2389 if not after:
2377 2390 for f in list:
2378 2391 if f in added:
2379 2392 continue # we never unlink added files on remove
2380 2393 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2381 2394 repo[None].forget(list)
2382 2395 finally:
2383 2396 wlock.release()
2384 2397
2385 2398 return ret
2386 2399
2387 2400 def cat(ui, repo, ctx, matcher, prefix, **opts):
2388 2401 err = 1
2389 2402
2390 2403 def write(path):
2391 2404 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2392 2405 pathname=os.path.join(prefix, path))
2393 2406 data = ctx[path].data()
2394 2407 if opts.get('decode'):
2395 2408 data = repo.wwritedata(path, data)
2396 2409 fp.write(data)
2397 2410 fp.close()
2398 2411
2399 2412 # Automation often uses hg cat on single files, so special case it
2400 2413 # for performance to avoid the cost of parsing the manifest.
2401 2414 if len(matcher.files()) == 1 and not matcher.anypats():
2402 2415 file = matcher.files()[0]
2403 2416 mf = repo.manifest
2404 2417 mfnode = ctx.manifestnode()
2405 2418 if mfnode and mf.find(mfnode, file)[0]:
2406 2419 write(file)
2407 2420 return 0
2408 2421
2409 2422 # Don't warn about "missing" files that are really in subrepos
2410 2423 def badfn(path, msg):
2411 2424 for subpath in ctx.substate:
2412 2425 if path.startswith(subpath):
2413 2426 return
2414 2427 matcher.bad(path, msg)
2415 2428
2416 2429 for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
2417 2430 write(abs)
2418 2431 err = 0
2419 2432
2420 2433 for subpath in sorted(ctx.substate):
2421 2434 sub = ctx.sub(subpath)
2422 2435 try:
2423 2436 submatch = matchmod.narrowmatcher(subpath, matcher)
2424 2437
2425 2438 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2426 2439 **opts):
2427 2440 err = 0
2428 2441 except error.RepoLookupError:
2429 2442 ui.status(_("skipping missing subrepository: %s\n")
2430 2443 % os.path.join(prefix, subpath))
2431 2444
2432 2445 return err
2433 2446
2434 2447 def commit(ui, repo, commitfunc, pats, opts):
2435 2448 '''commit the specified files or all outstanding changes'''
2436 2449 date = opts.get('date')
2437 2450 if date:
2438 2451 opts['date'] = util.parsedate(date)
2439 2452 message = logmessage(ui, opts)
2440 2453 matcher = scmutil.match(repo[None], pats, opts)
2441 2454
2442 2455 # extract addremove carefully -- this function can be called from a command
2443 2456 # that doesn't support addremove
2444 2457 if opts.get('addremove'):
2445 2458 if scmutil.addremove(repo, matcher, "", opts) != 0:
2446 2459 raise util.Abort(
2447 2460 _("failed to mark all new/missing files as added/removed"))
2448 2461
2449 2462 return commitfunc(ui, repo, message, matcher, opts)
2450 2463
2451 2464 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2452 2465 # avoid cycle context -> subrepo -> cmdutil
2453 2466 import context
2454 2467
2455 2468 # amend will reuse the existing user if not specified, but the obsolete
2456 2469 # marker creation requires that the current user's name is specified.
2457 2470 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2458 2471 ui.username() # raise exception if username not set
2459 2472
2460 2473 ui.note(_('amending changeset %s\n') % old)
2461 2474 base = old.p1()
2462 2475 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2463 2476
2464 2477 wlock = dsguard = lock = newid = None
2465 2478 try:
2466 2479 wlock = repo.wlock()
2467 2480 dsguard = dirstateguard(repo, 'amend')
2468 2481 lock = repo.lock()
2469 2482 tr = repo.transaction('amend')
2470 2483 try:
2471 2484 # See if we got a message from -m or -l, if not, open the editor
2472 2485 # with the message of the changeset to amend
2473 2486 message = logmessage(ui, opts)
2474 2487 # ensure logfile does not conflict with later enforcement of the
2475 2488 # message. potential logfile content has been processed by
2476 2489 # `logmessage` anyway.
2477 2490 opts.pop('logfile')
2478 2491 # First, do a regular commit to record all changes in the working
2479 2492 # directory (if there are any)
2480 2493 ui.callhooks = False
2481 2494 activebookmark = repo._activebookmark
2482 2495 try:
2483 2496 repo._activebookmark = None
2484 2497 opts['message'] = 'temporary amend commit for %s' % old
2485 2498 node = commit(ui, repo, commitfunc, pats, opts)
2486 2499 finally:
2487 2500 repo._activebookmark = activebookmark
2488 2501 ui.callhooks = True
2489 2502 ctx = repo[node]
2490 2503
2491 2504 # Participating changesets:
2492 2505 #
2493 2506 # node/ctx o - new (intermediate) commit that contains changes
2494 2507 # | from working dir to go into amending commit
2495 2508 # | (or a workingctx if there were no changes)
2496 2509 # |
2497 2510 # old o - changeset to amend
2498 2511 # |
2499 2512 # base o - parent of amending changeset
2500 2513
2501 2514 # Update extra dict from amended commit (e.g. to preserve graft
2502 2515 # source)
2503 2516 extra.update(old.extra())
2504 2517
2505 2518 # Also update it from the intermediate commit or from the wctx
2506 2519 extra.update(ctx.extra())
2507 2520
2508 2521 if len(old.parents()) > 1:
2509 2522 # ctx.files() isn't reliable for merges, so fall back to the
2510 2523 # slower repo.status() method
2511 2524 files = set([fn for st in repo.status(base, old)[:3]
2512 2525 for fn in st])
2513 2526 else:
2514 2527 files = set(old.files())
2515 2528
2516 2529 # Second, we use either the commit we just did or, if there were no
2517 2530 # changes, the parent of the working directory as the version of the
2518 2531 # files in the final amend commit
2519 2532 if node:
2520 2533 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2521 2534
2522 2535 user = ctx.user()
2523 2536 date = ctx.date()
2524 2537 # Recompute copies (avoid recording a -> b -> a)
2525 2538 copied = copies.pathcopies(base, ctx)
2526 2539 if old.p2().node() != nullid:
2527 2540 copied.update(copies.pathcopies(old.p2(), ctx))
2528 2541
2529 2542 # Prune files which were reverted by the updates: if old
2530 2543 # introduced file X and our intermediate commit, node,
2531 2544 # renamed that file, then those two files are the same and
2532 2545 # we can discard X from our list of files. Likewise if X
2533 2546 # was deleted, it's no longer relevant
2534 2547 files.update(ctx.files())
2535 2548
2536 2549 def samefile(f):
2537 2550 if f in ctx.manifest():
2538 2551 a = ctx.filectx(f)
2539 2552 if f in base.manifest():
2540 2553 b = base.filectx(f)
2541 2554 return (not a.cmp(b)
2542 2555 and a.flags() == b.flags())
2543 2556 else:
2544 2557 return False
2545 2558 else:
2546 2559 return f not in base.manifest()
2547 2560 files = [f for f in files if not samefile(f)]
2548 2561
2549 2562 def filectxfn(repo, ctx_, path):
2550 2563 try:
2551 2564 fctx = ctx[path]
2552 2565 flags = fctx.flags()
2553 2566 mctx = context.memfilectx(repo,
2554 2567 fctx.path(), fctx.data(),
2555 2568 islink='l' in flags,
2556 2569 isexec='x' in flags,
2557 2570 copied=copied.get(path))
2558 2571 return mctx
2559 2572 except KeyError:
2560 2573 return None
2561 2574 else:
2562 2575 ui.note(_('copying changeset %s to %s\n') % (old, base))
2563 2576
2564 2577 # Use version of files as in the old cset
2565 2578 def filectxfn(repo, ctx_, path):
2566 2579 try:
2567 2580 return old.filectx(path)
2568 2581 except KeyError:
2569 2582 return None
2570 2583
2571 2584 user = opts.get('user') or old.user()
2572 2585 date = opts.get('date') or old.date()
2573 2586 editform = mergeeditform(old, 'commit.amend')
2574 2587 editor = getcommiteditor(editform=editform, **opts)
2575 2588 if not message:
2576 2589 editor = getcommiteditor(edit=True, editform=editform)
2577 2590 message = old.description()
2578 2591
2579 2592 pureextra = extra.copy()
2580 2593 extra['amend_source'] = old.hex()
2581 2594
2582 2595 new = context.memctx(repo,
2583 2596 parents=[base.node(), old.p2().node()],
2584 2597 text=message,
2585 2598 files=files,
2586 2599 filectxfn=filectxfn,
2587 2600 user=user,
2588 2601 date=date,
2589 2602 extra=extra,
2590 2603 editor=editor)
2591 2604
2592 2605 newdesc = changelog.stripdesc(new.description())
2593 2606 if ((not node)
2594 2607 and newdesc == old.description()
2595 2608 and user == old.user()
2596 2609 and date == old.date()
2597 2610 and pureextra == old.extra()):
2598 2611 # nothing changed. continuing here would create a new node
2599 2612 # anyway because of the amend_source noise.
2600 2613 #
2601 2614 # This is not what we expect from amend.
2602 2615 return old.node()
2603 2616
2604 2617 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2605 2618 try:
2606 2619 if opts.get('secret'):
2607 2620 commitphase = 'secret'
2608 2621 else:
2609 2622 commitphase = old.phase()
2610 2623 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2611 2624 newid = repo.commitctx(new)
2612 2625 finally:
2613 2626 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2614 2627 if newid != old.node():
2615 2628 # Reroute the working copy parent to the new changeset
2616 2629 repo.setparents(newid, nullid)
2617 2630
2618 2631 # Move bookmarks from old parent to amend commit
2619 2632 bms = repo.nodebookmarks(old.node())
2620 2633 if bms:
2621 2634 marks = repo._bookmarks
2622 2635 for bm in bms:
2623 2636 ui.debug('moving bookmarks %r from %s to %s\n' %
2624 2637 (marks, old.hex(), hex(newid)))
2625 2638 marks[bm] = newid
2626 2639 marks.recordchange(tr)
2627 2640 # commit the whole amend process
2628 2641 if createmarkers:
2629 2642 # mark the new changeset as successor of the rewritten one
2630 2643 new = repo[newid]
2631 2644 obs = [(old, (new,))]
2632 2645 if node:
2633 2646 obs.append((ctx, ()))
2634 2647
2635 2648 obsolete.createmarkers(repo, obs)
2636 2649 tr.close()
2637 2650 finally:
2638 2651 tr.release()
2639 2652 dsguard.close()
2640 2653 if not createmarkers and newid != old.node():
2641 2654 # Strip the intermediate commit (if there was one) and the amended
2642 2655 # commit
2643 2656 if node:
2644 2657 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2645 2658 ui.note(_('stripping amended changeset %s\n') % old)
2646 2659 repair.strip(ui, repo, old.node(), topic='amend-backup')
2647 2660 finally:
2648 2661 lockmod.release(lock, dsguard, wlock)
2649 2662 return newid
2650 2663
2651 2664 def commiteditor(repo, ctx, subs, editform=''):
2652 2665 if ctx.description():
2653 2666 return ctx.description()
2654 2667 return commitforceeditor(repo, ctx, subs, editform=editform)
2655 2668
2656 2669 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2657 2670 editform=''):
2658 2671 if not extramsg:
2659 2672 extramsg = _("Leave message empty to abort commit.")
2660 2673
2661 2674 forms = [e for e in editform.split('.') if e]
2662 2675 forms.insert(0, 'changeset')
2663 2676 while forms:
2664 2677 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2665 2678 if tmpl:
2666 2679 committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
2667 2680 break
2668 2681 forms.pop()
2669 2682 else:
2670 2683 committext = buildcommittext(repo, ctx, subs, extramsg)
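# illustrative note (not part of the original module): the lookup above walks
# from the most specific editform to the least specific one, so for the
# editform 'commit.amend' it tries the [committemplate] keys
# 'changeset.commit.amend', then 'changeset.commit', then 'changeset'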
2671 2684
2672 2685 # run editor in the repository root
2673 2686 olddir = os.getcwd()
2674 2687 os.chdir(repo.root)
2675 2688 text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
2676 2689 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2677 2690 os.chdir(olddir)
2678 2691
2679 2692 if finishdesc:
2680 2693 text = finishdesc(text)
2681 2694 if not text.strip():
2682 2695 raise util.Abort(_("empty commit message"))
2683 2696
2684 2697 return text
2685 2698
2686 2699 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2687 2700 ui = repo.ui
2688 2701 tmpl, mapfile = gettemplate(ui, tmpl, None)
2689 2702
2690 2703 try:
2691 2704 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2692 2705 except SyntaxError as inst:
2693 2706 raise util.Abort(inst.args[0])
2694 2707
2695 2708 for k, v in repo.ui.configitems('committemplate'):
2696 2709 if k != 'changeset':
2697 2710 t.t.cache[k] = v
2698 2711
2699 2712 if not extramsg:
2700 2713 extramsg = '' # ensure that extramsg is string
2701 2714
2702 2715 ui.pushbuffer()
2703 2716 t.show(ctx, extramsg=extramsg)
2704 2717 return ui.popbuffer()
2705 2718
2706 2719 def hgprefix(msg):
2707 2720 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2708 2721
2709 2722 def buildcommittext(repo, ctx, subs, extramsg):
2710 2723 edittext = []
2711 2724 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2712 2725 if ctx.description():
2713 2726 edittext.append(ctx.description())
2714 2727 edittext.append("")
2715 2728 edittext.append("") # Empty line between message and comments.
2716 2729 edittext.append(hgprefix(_("Enter commit message."
2717 2730 " Lines beginning with 'HG:' are removed.")))
2718 2731 edittext.append(hgprefix(extramsg))
2719 2732 edittext.append("HG: --")
2720 2733 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2721 2734 if ctx.p2():
2722 2735 edittext.append(hgprefix(_("branch merge")))
2723 2736 if ctx.branch():
2724 2737 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2725 2738 if bookmarks.isactivewdirparent(repo):
2726 2739 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2727 2740 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2728 2741 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2729 2742 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2730 2743 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2731 2744 if not added and not modified and not removed:
2732 2745 edittext.append(hgprefix(_("no files changed")))
2733 2746 edittext.append("")
2734 2747
2735 2748 return "\n".join(edittext)
2736 2749
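# Illustrative note (not part of the original module): for an ordinary commit
# the text built above ends with a block along these lines (values invented
# for the example):
#   HG: Enter commit message. Lines beginning with 'HG:' are removed.
#   HG: Leave message empty to abort commit.
#   HG: --
#   HG: user: Alice <alice@example.com>
#   HG: branch 'default'
#   HG: changed mercurial/cmdutil.py
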
2737 2750 def commitstatus(repo, node, branch, bheads=None, opts=None):
2738 2751 if opts is None:
2739 2752 opts = {}
2740 2753 ctx = repo[node]
2741 2754 parents = ctx.parents()
2742 2755
2743 2756 if (not opts.get('amend') and bheads and node not in bheads and not
2744 2757 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2745 2758 repo.ui.status(_('created new head\n'))
2746 2759 # The message is not printed for initial roots. For the other
2747 2760 # changesets, it is printed in the following situations:
2748 2761 #
2749 2762 # Par column: for the 2 parents with ...
2750 2763 # N: null or no parent
2751 2764 # B: parent is on another named branch
2752 2765 # C: parent is a regular non head changeset
2753 2766 # H: parent was a branch head of the current branch
2754 2767 # Msg column: whether we print "created new head" message
2755 2768 # In the following, it is assumed that there already exists some
2756 2769 # initial branch heads of the current branch, otherwise nothing is
2757 2770 # printed anyway.
2758 2771 #
2759 2772 # Par Msg Comment
2760 2773 # N N y additional topo root
2761 2774 #
2762 2775 # B N y additional branch root
2763 2776 # C N y additional topo head
2764 2777 # H N n usual case
2765 2778 #
2766 2779 # B B y weird additional branch root
2767 2780 # C B y branch merge
2768 2781 # H B n merge with named branch
2769 2782 #
2770 2783 # C C y additional head from merge
2771 2784 # C H n merge with a head
2772 2785 #
2773 2786 # H H n head merge: head count decreases
2774 2787
2775 2788 if not opts.get('close_branch'):
2776 2789 for r in parents:
2777 2790 if r.closesbranch() and r.branch() == branch:
2778 2791 repo.ui.status(_('reopening closed branch head %d\n') % r)
2779 2792
2780 2793 if repo.ui.debugflag:
2781 2794 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2782 2795 elif repo.ui.verbose:
2783 2796 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2784 2797
2785 2798 def revert(ui, repo, ctx, parents, *pats, **opts):
2786 2799 parent, p2 = parents
2787 2800 node = ctx.node()
2788 2801
2789 2802 mf = ctx.manifest()
2790 2803 if node == p2:
2791 2804 parent = p2
2792 2805 if node == parent:
2793 2806 pmf = mf
2794 2807 else:
2795 2808 pmf = None
2796 2809
2797 2810 # need all matching names in dirstate and manifest of target rev,
2798 2811 # so have to walk both. do not print errors if files exist in one
2799 2812 # but not the other. in both cases, filesets should be evaluated against
2800 2813 # workingctx to get consistent result (issue4497). this means 'set:**'
2801 2814 # cannot be used to select missing files from target rev.
2802 2815
2803 2816 # `names` is a mapping for all elements in working copy and target revision
2804 2817 # The mapping is in the form:
2805 2818 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2806 2819 names = {}
2807 2820
2808 2821 wlock = repo.wlock()
2809 2822 try:
2810 2823 ## filling of the `names` mapping
2811 2824 # walk dirstate to fill `names`
2812 2825
2813 2826 interactive = opts.get('interactive', False)
2814 2827 wctx = repo[None]
2815 2828 m = scmutil.match(wctx, pats, opts)
2816 2829
2817 2830 # we'll need this later
2818 2831 targetsubs = sorted(s for s in wctx.substate if m(s))
2819 2832
2820 2833 if not m.always():
2821 2834 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2822 2835 names[abs] = m.rel(abs), m.exact(abs)
2823 2836
2824 2837 # walk target manifest to fill `names`
2825 2838
2826 2839 def badfn(path, msg):
2827 2840 if path in names:
2828 2841 return
2829 2842 if path in ctx.substate:
2830 2843 return
2831 2844 path_ = path + '/'
2832 2845 for f in names:
2833 2846 if f.startswith(path_):
2834 2847 return
2835 2848 ui.warn("%s: %s\n" % (m.rel(path), msg))
2836 2849
2837 2850 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2838 2851 if abs not in names:
2839 2852 names[abs] = m.rel(abs), m.exact(abs)
2840 2853
2841 2854 # Find the status of all files in `names`.
2842 2855 m = scmutil.matchfiles(repo, names)
2843 2856
2844 2857 changes = repo.status(node1=node, match=m,
2845 2858 unknown=True, ignored=True, clean=True)
2846 2859 else:
2847 2860 changes = repo.status(node1=node, match=m)
2848 2861 for kind in changes:
2849 2862 for abs in kind:
2850 2863 names[abs] = m.rel(abs), m.exact(abs)
2851 2864
2852 2865 m = scmutil.matchfiles(repo, names)
2853 2866
2854 2867 modified = set(changes.modified)
2855 2868 added = set(changes.added)
2856 2869 removed = set(changes.removed)
2857 2870 _deleted = set(changes.deleted)
2858 2871 unknown = set(changes.unknown)
2859 2872 unknown.update(changes.ignored)
2860 2873 clean = set(changes.clean)
2861 2874 modadded = set()
2862 2875
2863 2876 # split between files known in target manifest and the others
2864 2877 smf = set(mf)
2865 2878
2866 2879 # determine the exact nature of the deleted files
2867 2880 deladded = _deleted - smf
2868 2881 deleted = _deleted - deladded
2869 2882
2870 2883 # We need to account for the state of the file in the dirstate,
2871 2884 # even when we revert against something other than the parent. This will
2872 2885 # slightly alter the behavior of revert (doing a backup or not, delete
2873 2886 # or just forget, etc).
2874 2887 if parent == node:
2875 2888 dsmodified = modified
2876 2889 dsadded = added
2877 2890 dsremoved = removed
2878 2891 # store all local modifications, useful later for rename detection
2879 2892 localchanges = dsmodified | dsadded
2880 2893 modified, added, removed = set(), set(), set()
2881 2894 else:
2882 2895 changes = repo.status(node1=parent, match=m)
2883 2896 dsmodified = set(changes.modified)
2884 2897 dsadded = set(changes.added)
2885 2898 dsremoved = set(changes.removed)
2886 2899 # store all local modifications, useful later for rename detection
2887 2900 localchanges = dsmodified | dsadded
2888 2901
2889 2902 # only take removes between wc and target into account
2890 2903 clean |= dsremoved - removed
2891 2904 dsremoved &= removed
2892 2905 # distinguish between dirstate removes and the others
2893 2906 removed -= dsremoved
2894 2907
2895 2908 modadded = added & dsmodified
2896 2909 added -= modadded
2897 2910
2898 2911 # tell newly modified files apart.
2899 2912 dsmodified &= modified
2900 2913 dsmodified |= modified & dsadded # dirstate-added files may need backup
2901 2914 modified -= dsmodified
2902 2915
2903 2916 # We need to wait for some post-processing to update this set
2904 2917 # before making the distinction. The dirstate will be used for
2905 2918 # that purpose.
2906 2919 dsadded = added
2907 2920
2908 2921 # in case of merge, files that are actually added can be reported as
2909 2922 # modified, so we need to post-process the result
2910 2923 if p2 != nullid:
2911 2924 if pmf is None:
2912 2925 # only need parent manifest in the merge case,
2913 2926 # so do not read by default
2914 2927 pmf = repo[parent].manifest()
2915 2928 mergeadd = dsmodified - set(pmf)
2916 2929 dsadded |= mergeadd
2917 2930 dsmodified -= mergeadd
2918 2931
2919 2932 # if f is a rename, update `names` to also revert the source
2920 2933 cwd = repo.getcwd()
2921 2934 for f in localchanges:
2922 2935 src = repo.dirstate.copied(f)
2923 2936 # XXX should we check for rename down to target node?
2924 2937 if src and src not in names and repo.dirstate[src] == 'r':
2925 2938 dsremoved.add(src)
2926 2939 names[src] = (repo.pathto(src, cwd), True)
2927 2940
2928 2941 # distinguish between files to forget and the others
2929 2942 added = set()
2930 2943 for abs in dsadded:
2931 2944 if repo.dirstate[abs] != 'a':
2932 2945 added.add(abs)
2933 2946 dsadded -= added
2934 2947
2935 2948 for abs in deladded:
2936 2949 if repo.dirstate[abs] == 'a':
2937 2950 dsadded.add(abs)
2938 2951 deladded -= dsadded
2939 2952
2940 2953 # For files marked as removed, we check if an unknown file is present at
2941 2954 # the same path. If such a file exists, it may need to be backed up.
2942 2955 # Making the distinction at this stage helps keep the backup
2943 2956 # logic simpler.
2944 2957 removunk = set()
2945 2958 for abs in removed:
2946 2959 target = repo.wjoin(abs)
2947 2960 if os.path.lexists(target):
2948 2961 removunk.add(abs)
2949 2962 removed -= removunk
2950 2963
2951 2964 dsremovunk = set()
2952 2965 for abs in dsremoved:
2953 2966 target = repo.wjoin(abs)
2954 2967 if os.path.lexists(target):
2955 2968 dsremovunk.add(abs)
2956 2969 dsremoved -= dsremovunk
2957 2970
2958 2971 # actions to be actually performed by revert
2959 2972 # (<list of files>, <message>) tuple
2960 2973 actions = {'revert': ([], _('reverting %s\n')),
2961 2974 'add': ([], _('adding %s\n')),
2962 2975 'remove': ([], _('removing %s\n')),
2963 2976 'drop': ([], _('removing %s\n')),
2964 2977 'forget': ([], _('forgetting %s\n')),
2965 2978 'undelete': ([], _('undeleting %s\n')),
2966 2979 'noop': (None, _('no changes needed to %s\n')),
2967 2980 'unknown': (None, _('file not managed: %s\n')),
2968 2981 }
2969 2982
2970 2983 # "constant" that convey the backup strategy.
2971 2984 # All set to `discard` if `no-backup` is set do avoid checking
2972 2985 # no_backup lower in the code.
2973 2986 # These values are ordered for comparison purposes
2974 2987 backup = 2 # unconditionally do backup
2975 2988 check = 1 # check if the existing file differs from target
2976 2989 discard = 0 # never do backup
2977 2990 if opts.get('no_backup'):
2978 2991 backup = check = discard
2979 2992
2980 2993 backupanddel = actions['remove']
2981 2994 if not opts.get('no_backup'):
2982 2995 backupanddel = actions['drop']
2983 2996
2984 2997 disptable = (
2985 2998 # dispatch table:
2986 2999 # file state
2987 3000 # action
2988 3001 # make backup
2989 3002
2990 3003 ## Sets whose contents will result in changes to files on disk
2991 3004 # Modified compared to target, no local change
2992 3005 (modified, actions['revert'], discard),
2993 3006 # Modified compared to target, but local file is deleted
2994 3007 (deleted, actions['revert'], discard),
2995 3008 # Modified compared to target, local change
2996 3009 (dsmodified, actions['revert'], backup),
2997 3010 # Added since target
2998 3011 (added, actions['remove'], discard),
2999 3012 # Added in working directory
3000 3013 (dsadded, actions['forget'], discard),
3001 3014 # Added since target, have local modification
3002 3015 (modadded, backupanddel, backup),
3003 3016 # Added since target but file is missing in working directory
3004 3017 (deladded, actions['drop'], discard),
3005 3018 # Removed since target, before working copy parent
3006 3019 (removed, actions['add'], discard),
3007 3020 # Same as `removed` but an unknown file exists at the same path
3008 3021 (removunk, actions['add'], check),
3009 3022 # Removed since target, marked as such in working copy parent
3010 3023 (dsremoved, actions['undelete'], discard),
3011 3024 # Same as `dsremoved` but an unknown file exists at the same path
3012 3025 (dsremovunk, actions['undelete'], check),
3013 3026 ## the following sets do not result in any file changes
3014 3027 # File with no modification
3015 3028 (clean, actions['noop'], discard),
3016 3029 # Existing file, not tracked anywhere
3017 3030 (unknown, actions['unknown'], discard),
3018 3031 )
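# illustrative note (not part of the original module): with the ordering of
# the backup "constants" above, the "backup <= dobackup" test below backs a
# file up unconditionally when the dispatch table asks for `backup`, falls
# back to the content comparison when it asks for `check`, and is skipped
# entirely when it asks for `discard` or when --no-backup collapsed all
# three values to `discard`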
3019 3032
3020 3033 for abs, (rel, exact) in sorted(names.items()):
3021 3034 # target file to be touched on disk (relative to cwd)
3022 3035 target = repo.wjoin(abs)
3023 3036 # search the entry in the dispatch table.
3024 3037 # if the file is in any of these sets, it was touched in the working
3025 3038 # directory parent and we are sure it needs to be reverted.
3026 3039 for table, (xlist, msg), dobackup in disptable:
3027 3040 if abs not in table:
3028 3041 continue
3029 3042 if xlist is not None:
3030 3043 xlist.append(abs)
3031 3044 if dobackup and (backup <= dobackup
3032 3045 or wctx[abs].cmp(ctx[abs])):
3033 3046 bakname = "%s.orig" % rel
3034 3047 ui.note(_('saving current version of %s as %s\n') %
3035 3048 (rel, bakname))
3036 3049 if not opts.get('dry_run'):
3037 3050 if interactive:
3038 3051 util.copyfile(target, bakname)
3039 3052 else:
3040 3053 util.rename(target, bakname)
3041 3054 if ui.verbose or not exact:
3042 3055 if not isinstance(msg, basestring):
3043 3056 msg = msg(abs)
3044 3057 ui.status(msg % rel)
3045 3058 elif exact:
3046 3059 ui.warn(msg % rel)
3047 3060 break
3048 3061
3049 3062 if not opts.get('dry_run'):
3050 3063 needdata = ('revert', 'add', 'undelete')
3051 3064 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3052 3065 _performrevert(repo, parents, ctx, actions, interactive)
3053 3066
3054 3067 if targetsubs:
3055 3068 # Revert the subrepos on the revert list
3056 3069 for sub in targetsubs:
3057 3070 try:
3058 3071 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3059 3072 except KeyError:
3060 3073 raise util.Abort("subrepository '%s' does not exist in %s!"
3061 3074 % (sub, short(ctx.node())))
3062 3075 finally:
3063 3076 wlock.release()
3064 3077
3065 3078 def _revertprefetch(repo, ctx, *files):
3066 3079 """Let extension changing the storage layer prefetch content"""
3067 3080 pass
3068 3081
3069 3082 def _performrevert(repo, parents, ctx, actions, interactive=False):
3070 3083 """function that actually performs all the actions computed for revert
3071 3084
3072 3085 This is an independent function to let extensions plug in and react to
3073 3086 the imminent revert.
3074 3087
3075 3088 Make sure you have the working directory locked when calling this function.
3076 3089 """
3077 3090 parent, p2 = parents
3078 3091 node = ctx.node()
3079 3092 def checkout(f):
3080 3093 fc = ctx[f]
3081 3094 repo.wwrite(f, fc.data(), fc.flags())
3082 3095
3083 3096 audit_path = pathutil.pathauditor(repo.root)
3084 3097 for f in actions['forget'][0]:
3085 3098 repo.dirstate.drop(f)
3086 3099 for f in actions['remove'][0]:
3087 3100 audit_path(f)
3088 3101 try:
3089 3102 util.unlinkpath(repo.wjoin(f))
3090 3103 except OSError:
3091 3104 pass
3092 3105 repo.dirstate.remove(f)
3093 3106 for f in actions['drop'][0]:
3094 3107 audit_path(f)
3095 3108 repo.dirstate.remove(f)
3096 3109
3097 3110 normal = None
3098 3111 if node == parent:
3099 3112 # We're reverting to our parent. If possible, we'd like status
3100 3113 # to report the file as clean. We have to use normallookup for
3101 3114 # merges to avoid losing information about merged/dirty files.
3102 3115 if p2 != nullid:
3103 3116 normal = repo.dirstate.normallookup
3104 3117 else:
3105 3118 normal = repo.dirstate.normal
3106 3119
3107 3120 newlyaddedandmodifiedfiles = set()
3108 3121 if interactive:
3109 3122 # Prompt the user for changes to revert
3110 3123 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3111 3124 m = scmutil.match(ctx, torevert, {})
3112 3125 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3113 3126 diffopts.nodates = True
3114 3127 diffopts.git = True
3115 3128 reversehunks = repo.ui.configbool('experimental',
3116 3129 'revertalternateinteractivemode',
3117 3130 True)
3118 3131 if reversehunks:
3119 3132 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3120 3133 else:
3121 3134 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3122 3135 originalchunks = patch.parsepatch(diff)
3123 3136
3124 3137 try:
3125 3138
3126 3139 chunks = recordfilter(repo.ui, originalchunks)
3127 3140 if reversehunks:
3128 3141 chunks = patch.reversehunks(chunks)
3129 3142
3130 3143 except patch.PatchError as err:
3131 3144 raise util.Abort(_('error parsing patch: %s') % err)
3132 3145
3133 3146 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3134 3147 # Apply changes
3135 3148 fp = cStringIO.StringIO()
3136 3149 for c in chunks:
3137 3150 c.write(fp)
3138 3151 dopatch = fp.tell()
3139 3152 fp.seek(0)
3140 3153 if dopatch:
3141 3154 try:
3142 3155 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3143 3156 except patch.PatchError as err:
3144 3157 raise util.Abort(str(err))
3145 3158 del fp
3146 3159 else:
3147 3160 for f in actions['revert'][0]:
3148 3161 checkout(f)
3149 3162 if normal:
3150 3163 normal(f)
3151 3164
3152 3165 for f in actions['add'][0]:
3153 3166 # Don't check out modified files; they are already created by the diff
3154 3167 if f not in newlyaddedandmodifiedfiles:
3155 3168 checkout(f)
3156 3169 repo.dirstate.add(f)
3157 3170
3158 3171 normal = repo.dirstate.normallookup
3159 3172 if node == parent and p2 == nullid:
3160 3173 normal = repo.dirstate.normal
3161 3174 for f in actions['undelete'][0]:
3162 3175 checkout(f)
3163 3176 normal(f)
3164 3177
3165 3178 copied = copies.pathcopies(repo[parent], ctx)
3166 3179
3167 3180 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3168 3181 if f in copied:
3169 3182 repo.dirstate.copy(copied[f], f)
3170 3183
3171 3184 def command(table):
3172 3185 """Returns a function object to be used as a decorator for making commands.
3173 3186
3174 3187 This function receives a command table as its argument. The table should
3175 3188 be a dict.
3176 3189
3177 3190 The returned function can be used as a decorator for adding commands
3178 3191 to that command table. This function accepts multiple arguments to define
3179 3192 a command.
3180 3193
3181 3194 The first argument is the command name.
3182 3195
3183 3196 The options argument is an iterable of tuples defining command arguments.
3184 3197 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3185 3198
3186 3199 The synopsis argument defines a short, one line summary of how to use the
3187 3200 command. This shows up in the help output.
3188 3201
3189 3202 The norepo argument defines whether the command does not require a
3190 3203 local repository. Most commands operate against a repository, thus the
3191 3204 default is False.
3192 3205
3193 3206 The optionalrepo argument defines whether the command optionally requires
3194 3207 a local repository.
3195 3208
3196 3209 The inferrepo argument defines whether to try to find a repository from the
3197 3210 command line arguments. If True, arguments will be examined for potential
3198 3211 repository locations. See ``findrepo()``. If a repository is found, it
3199 3212 will be used.
3200 3213 """
3201 3214 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3202 3215 inferrepo=False):
3203 3216 def decorator(func):
3204 3217 if synopsis:
3205 3218 table[name] = func, list(options), synopsis
3206 3219 else:
3207 3220 table[name] = func, list(options)
3208 3221
3209 3222 if norepo:
3210 3223 # Avoid import cycle.
3211 3224 import commands
3212 3225 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3213 3226
3214 3227 if optionalrepo:
3215 3228 import commands
3216 3229 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3217 3230
3218 3231 if inferrepo:
3219 3232 import commands
3220 3233 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3221 3234
3222 3235 return func
3223 3236 return decorator
3224 3237
3225 3238 return cmd
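
# Editorial sketch: typical use of the factory above from an extension. The
# command name, option, and function body are hypothetical examples.
#
#     from mercurial import cmdutil
#     from mercurial.i18n import _
#
#     cmdtable = {}
#     command = cmdutil.command(cmdtable)
#
#     @command('hello',
#              [('n', 'name', '', _('name to greet'), _('NAME'))],
#              _('hg hello [-n NAME]'),
#              norepo=True)
#     def hello(ui, *args, **opts):
#         """print a friendly greeting"""
#         ui.write('hello, %s!\n' % (opts.get('name') or 'world'))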
3226 3239
3227 3240 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3228 3241 # commands.outgoing. "missing" is "missing" of the result of
3229 3242 # "findcommonoutgoing()"
3230 3243 outgoinghooks = util.hooks()
3231 3244
3232 3245 # a list of (ui, repo) functions called by commands.summary
3233 3246 summaryhooks = util.hooks()
3234 3247
3235 3248 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3236 3249 #
3237 3250 # functions should return tuple of booleans below, if 'changes' is None:
3238 3251 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3239 3252 #
3240 3253 # otherwise, 'changes' is a tuple of tuples below:
3241 3254 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3242 3255 # - (desturl, destbranch, destpeer, outgoing)
3243 3256 summaryremotehooks = util.hooks()
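
# Editorial sketch: registering a summary hook from an extension, assuming
# util.hooks() exposes an add(source, hook) registration method. The source
# name 'myext' and its state file are hypothetical; the (ui, repo) calling
# convention follows the comments above.
#
#     def summaryhook(ui, repo):
#         if repo.vfs.exists('myext-state'):
#             ui.status('myext: operation in progress\n')
#
#     def extsetup(ui):
#         cmdutil.summaryhooks.add('myext', summaryhook)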
3244 3257
3245 3258 # A list of state files kept by multistep operations like graft.
3246 3259 # Since graft cannot be aborted, it is considered 'clearable' by update.
3247 3260 # note: bisect is intentionally excluded
3248 3261 # (state file, clearable, allowcommit, error, hint)
3249 3262 unfinishedstates = [
3250 3263 ('graftstate', True, False, _('graft in progress'),
3251 3264 _("use 'hg graft --continue' or 'hg update' to abort")),
3252 3265 ('updatestate', True, False, _('last update was interrupted'),
3253 3266 _("use 'hg update' to get a consistent checkout"))
3254 3267 ]
3255 3268
3256 3269 def checkunfinished(repo, commit=False):
3257 3270 '''Look for an unfinished multistep operation, like graft, and abort
3258 3271 if found. It's probably good to check this right before
3259 3272 bailifchanged().
3260 3273 '''
3261 3274 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3262 3275 if commit and allowcommit:
3263 3276 continue
3264 3277 if repo.vfs.exists(f):
3265 3278 raise util.Abort(msg, hint=hint)
3266 3279
3267 3280 def clearunfinished(repo):
3268 3281 '''Check for unfinished operations (as above), and clear the ones
3269 3282 that are clearable.
3270 3283 '''
3271 3284 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3272 3285 if not clearable and repo.vfs.exists(f):
3273 3286 raise util.Abort(msg, hint=hint)
3274 3287 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3275 3288 if clearable and repo.vfs.exists(f):
3276 3289 util.unlink(repo.join(f))
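
# Editorial sketch: an extension with its own multistep operation would
# register a state file so the checks above know about it. The 'myextstate'
# file and messages are hypothetical; the tuple layout is
# (state file, clearable, allowcommit, error, hint) as documented above.
#
#     cmdutil.unfinishedstates.append(
#         ('myextstate', False, False, _('myext operation in progress'),
#          _("use 'hg myext --continue' or 'hg myext --abort'")))
#
# With that entry in place, checkunfinished() aborts other commands while the
# state file exists, and clearunfinished() refuses to clear it because the
# entry is marked as not clearable.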
3277 3290
3278 3291 class dirstateguard(object):
3279 3292 '''Restore dirstate on unexpected failure.
3280 3293
3281 3294 At construction, this class does:
3282 3295
3283 3296 - write current ``repo.dirstate`` out, and
3284 3297 - save ``.hg/dirstate`` into the backup file
3285 3298
3286 3299 This restores ``.hg/dirstate`` from the backup file if ``release()``
3287 3300 is invoked before ``close()``.
3288 3301
3289 3302 If ``close()`` is invoked first, this just removes the backup file.
3290 3303 '''
3291 3304
3292 3305 def __init__(self, repo, name):
3293 3306 repo.dirstate.write()
3294 3307 self._repo = repo
3295 3308 self._filename = 'dirstate.backup.%s.%d' % (name, id(self))
3296 3309 repo.vfs.write(self._filename, repo.vfs.tryread('dirstate'))
3297 3310 self._active = True
3298 3311 self._closed = False
3299 3312
3300 3313 def __del__(self):
3301 3314 if self._active: # still active
3302 3315 # this may occur, even if this class is used correctly:
3303 3316 # for example, releasing other resources like a transaction
3304 3317 # may raise an exception before ``dirstateguard.release`` in
3305 3318 # ``release(tr, ....)``.
3306 3319 self._abort()
3307 3320
3308 3321 def close(self):
3309 3322 if not self._active: # already inactivated
3310 3323 msg = (_("can't close already inactivated backup: %s")
3311 3324 % self._filename)
3312 3325 raise util.Abort(msg)
3313 3326
3314 3327 self._repo.vfs.unlink(self._filename)
3315 3328 self._active = False
3316 3329 self._closed = True
3317 3330
3318 3331 def _abort(self):
3319 3332 # this "invalidate()" prevents "wlock.release()" from writing
3320 3333 # changes of dirstate out after restoring to original status
3321 3334 self._repo.dirstate.invalidate()
3322 3335
3323 3336 self._repo.vfs.rename(self._filename, 'dirstate')
3324 3337 self._active = False
3325 3338
3326 3339 def release(self):
3327 3340 if not self._closed:
3328 3341 if not self._active: # already inactivated
3329 3342 msg = (_("can't release already inactivated backup: %s")
3330 3343 % self._filename)
3331 3344 raise util.Abort(msg)
3332 3345 self._abort()
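
# Editorial sketch: the intended calling pattern for dirstateguard, with
# 'mycommand' as a hypothetical activity name.
#
#     dsguard = dirstateguard(repo, 'mycommand')
#     try:
#         # ... operations that rewrite the dirstate ...
#         dsguard.close()    # success: the backup file is simply removed
#     finally:
#         # no-op once close() has run; otherwise restores .hg/dirstate
#         dsguard.release()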
3333 3346
3334 3347 _bundlecompspecs = {'none': None,
3335 3348 'bzip2': 'BZ',
3336 3349 'gzip': 'GZ',
3337 3350 }
3338 3351
3339 3352 _bundleversionspecs = {'v1': '01',
3340 3353 'v2': '02',
3341 3354 'bundle2': '02', #legacy
3342 3355 }
3343 3356
3344 3357 def parsebundletype(repo, spec):
3345 3358 """return the internal bundle type to use from user input
3346 3359
3347 3360 This parses the user-specified bundle type as accepted by:
3348 3361
3349 3362 'hg bundle --type TYPE'.
3350 3363
3351 3364 It accepts formats of the form [compression][-version]|[version]
3352 3365
3353 3366 Consensus about extensions of the format for various bundle2 features
3354 3367 is to prefix any feature with "+", e.g. "+treemanifest" or "gzip+phases"
3355 3368 """
3356 3369 comp, version = None, None
3357 3370
3358 3371 if '-' in spec:
3359 3372 comp, version = spec.split('-', 1)
3360 3373 elif spec in _bundlecompspecs:
3361 3374 comp = spec
3362 3375 elif spec in _bundleversionspecs:
3363 3376 version = spec
3364 3377 else:
3365 3378 raise util.Abort(_('unknown bundle type specified with --type'))
3366 3379
3367 3380 if comp is None:
3368 3381 comp = 'BZ'
3369 3382 else:
3370 3383 try:
3371 3384 comp = _bundlecompspecs[comp]
3372 3385 except KeyError:
3373 3386 raise util.Abort(_('unknown bundle type specified with --type'))
3374 3387
3375 3388 if version is None:
3376 3389 version = '01'
3377 3390 if 'generaldelta' in repo.requirements:
3378 3391 version = '02'
3379 3392 else:
3380 3393 try:
3381 3394 version = _bundleversionspecs[version]
3382 3395 except KeyError:
3383 3396 raise util.Abort(_('unknown bundle type specified with --type'))
3384 3397
3385 3398 return version, comp
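
# Editorial note: worked examples of the parsing above, assuming a repository
# without 'generaldelta' in its requirements (so the default version is '01'):
#
#     parsebundletype(repo, 'bzip2')    -> ('01', 'BZ')
#     parsebundletype(repo, 'gzip-v2')  -> ('02', 'GZ')
#     parsebundletype(repo, 'v2')       -> ('02', 'BZ')
#     parsebundletype(repo, 'none-v1')  -> ('01', None)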