cmdutil: pass node instead of ctx to diffordiffstat...
Durham Goode
r27622:0bc71f45 default
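The change itself is a one-line substitution in changeset_printer.showpatch():
the parent changectx is resolved to its node before being handed to
diffordiffstat(), so both the node1 and node2 arguments receive nodes. This
matches what jsonchangeset._show() already does with
"node, prev = ctx.node(), ctx.p1().node()". Illustrative excerpt, taken from
the hunk below:

    prev = ctx.p1()          # before: the parent changectx was passed through
    prev = ctx.p1().node()   # after: its node is passed instead
    diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                   match=matchfn, stat=True)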
@@ -1,3418 +1,3418 @@
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import formatter
18 18 import crecord as crecordmod
19 19 import lock as lockmod
20 20
21 21 def ishunk(x):
22 22 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
23 23 return isinstance(x, hunkclasses)
24 24
25 25 def newandmodified(chunks, originalchunks):
26 26 newlyaddedandmodifiedfiles = set()
27 27 for chunk in chunks:
28 28 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
29 29 originalchunks:
30 30 newlyaddedandmodifiedfiles.add(chunk.header.filename())
31 31 return newlyaddedandmodifiedfiles
32 32
33 33 def parsealiases(cmd):
34 34 return cmd.lstrip("^").split("|")
35 35
36 36 def setupwrapcolorwrite(ui):
37 37 # wrap ui.write so diff output can be labeled/colorized
38 38 def wrapwrite(orig, *args, **kw):
39 39 label = kw.pop('label', '')
40 40 for chunk, l in patch.difflabel(lambda: args):
41 41 orig(chunk, label=label + l)
42 42
43 43 oldwrite = ui.write
44 44 def wrap(*args, **kwargs):
45 45 return wrapwrite(oldwrite, *args, **kwargs)
46 46 setattr(ui, 'write', wrap)
47 47 return oldwrite
48 48
49 49 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
50 50 if usecurses:
51 51 if testfile:
52 52 recordfn = crecordmod.testdecorator(testfile,
53 53 crecordmod.testchunkselector)
54 54 else:
55 55 recordfn = crecordmod.chunkselector
56 56
57 57 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
58 58
59 59 else:
60 60 return patch.filterpatch(ui, originalhunks, operation)
61 61
62 62 def recordfilter(ui, originalhunks, operation=None):
63 63 """ Prompts the user to filter the originalhunks and returns a list of
64 64 selected hunks.
65 65 *operation* is used for ui purposes to indicate to the user
66 66 what kind of filtering they are doing: reverting, committing, shelving, etc.
67 67 *operation* has to be a translated string.
68 68 """
69 69 usecurses = crecordmod.checkcurses(ui)
70 70 testfile = ui.config('experimental', 'crecordtest', None)
71 71 oldwrite = setupwrapcolorwrite(ui)
72 72 try:
73 73 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
74 74 testfile, operation)
75 75 finally:
76 76 ui.write = oldwrite
77 77 return newchunks, newopts
78 78
79 79 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
80 80 filterfn, *pats, **opts):
81 81 import merge as mergemod
82 82
83 83 if not ui.interactive():
84 84 if cmdsuggest:
85 85 msg = _('running non-interactively, use %s instead') % cmdsuggest
86 86 else:
87 87 msg = _('running non-interactively')
88 88 raise error.Abort(msg)
89 89
90 90 # make sure username is set before going interactive
91 91 if not opts.get('user'):
92 92 ui.username() # raise exception, username not provided
93 93
94 94 def recordfunc(ui, repo, message, match, opts):
95 95 """This is a generic record driver.
96 96
97 97 Its job is to interactively filter local changes, and
98 98 accordingly prepare working directory into a state in which the
99 99 job can be delegated to a non-interactive commit command such as
100 100 'commit' or 'qrefresh'.
101 101
102 102 After the actual job is done by non-interactive command, the
103 103 working directory is restored to its original state.
104 104
105 105 In the end we'll record interesting changes, and everything else
106 106 will be left in place, so the user can continue working.
107 107 """
108 108
109 109 checkunfinished(repo, commit=True)
110 110 merge = len(repo[None].parents()) > 1
111 111 if merge:
112 112 raise error.Abort(_('cannot partially commit a merge '
113 113 '(use "hg commit" instead)'))
114 114
115 115 status = repo.status(match=match)
116 116 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
117 117 diffopts.nodates = True
118 118 diffopts.git = True
119 119 diffopts.showfunc = True
120 120 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
121 121 originalchunks = patch.parsepatch(originaldiff)
122 122
123 123 # 1. filter patch, so we have an intending-to-apply subset of it
124 124 try:
125 125 chunks, newopts = filterfn(ui, originalchunks)
126 126 except patch.PatchError as err:
127 127 raise error.Abort(_('error parsing patch: %s') % err)
128 128 opts.update(newopts)
129 129
130 130 # We need to keep a backup of files that have been newly added and
131 131 # modified during the recording process because there is a previous
132 132 # version without the edit in the workdir
133 133 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
134 134 contenders = set()
135 135 for h in chunks:
136 136 try:
137 137 contenders.update(set(h.files()))
138 138 except AttributeError:
139 139 pass
140 140
141 141 changed = status.modified + status.added + status.removed
142 142 newfiles = [f for f in changed if f in contenders]
143 143 if not newfiles:
144 144 ui.status(_('no changes to record\n'))
145 145 return 0
146 146
147 147 modified = set(status.modified)
148 148
149 149 # 2. backup changed files, so we can restore them in the end
150 150
151 151 if backupall:
152 152 tobackup = changed
153 153 else:
154 154 tobackup = [f for f in newfiles if f in modified or f in \
155 155 newlyaddedandmodifiedfiles]
156 156 backups = {}
157 157 if tobackup:
158 158 backupdir = repo.join('record-backups')
159 159 try:
160 160 os.mkdir(backupdir)
161 161 except OSError as err:
162 162 if err.errno != errno.EEXIST:
163 163 raise
164 164 try:
165 165 # backup continues
166 166 for f in tobackup:
167 167 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
168 168 dir=backupdir)
169 169 os.close(fd)
170 170 ui.debug('backup %r as %r\n' % (f, tmpname))
171 171 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
172 172 backups[f] = tmpname
173 173
174 174 fp = cStringIO.StringIO()
175 175 for c in chunks:
176 176 fname = c.filename()
177 177 if fname in backups:
178 178 c.write(fp)
179 179 dopatch = fp.tell()
180 180 fp.seek(0)
181 181
182 182 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
183 183 # 3a. apply filtered patch to clean repo (clean)
184 184 if backups:
185 185 # Equivalent to hg.revert
186 186 m = scmutil.matchfiles(repo, backups.keys())
187 187 mergemod.update(repo, repo.dirstate.p1(),
188 188 False, True, matcher=m)
189 189
190 190 # 3b. (apply)
191 191 if dopatch:
192 192 try:
193 193 ui.debug('applying patch\n')
194 194 ui.debug(fp.getvalue())
195 195 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
196 196 except patch.PatchError as err:
197 197 raise error.Abort(str(err))
198 198 del fp
199 199
200 200 # 4. We prepared working directory according to filtered
201 201 # patch. Now is the time to delegate the job to
202 202 # commit/qrefresh or the like!
203 203
204 204 # Make all of the pathnames absolute.
205 205 newfiles = [repo.wjoin(nf) for nf in newfiles]
206 206 return commitfunc(ui, repo, *newfiles, **opts)
207 207 finally:
208 208 # 5. finally restore backed-up files
209 209 try:
210 210 dirstate = repo.dirstate
211 211 for realname, tmpname in backups.iteritems():
212 212 ui.debug('restoring %r to %r\n' % (tmpname, realname))
213 213
214 214 if dirstate[realname] == 'n':
215 215 # without normallookup, restoring timestamp
216 216 # may cause partially committed files
217 217 # to be treated as unmodified
218 218 dirstate.normallookup(realname)
219 219
220 220 # copystat=True here and above are a hack to trick any
221 221 # editors that have f open into thinking we haven't modified them.
222 222 #
223 223 # Also note that this is racy as an editor could notice the
224 224 # file's mtime before we've finished writing it.
225 225 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
226 226 os.unlink(tmpname)
227 227 if tobackup:
228 228 os.rmdir(backupdir)
229 229 except OSError:
230 230 pass
231 231
232 232 def recordinwlock(ui, repo, message, match, opts):
233 233 wlock = repo.wlock()
234 234 try:
235 235 return recordfunc(ui, repo, message, match, opts)
236 236 finally:
237 237 wlock.release()
238 238
239 239 return commit(ui, repo, recordinwlock, pats, opts)
240 240
241 241 def findpossible(cmd, table, strict=False):
242 242 """
243 243 Return cmd -> (aliases, command table entry)
244 244 for each matching command.
245 245 Return debug commands (or their aliases) only if no normal command matches.
246 246 """
247 247 choice = {}
248 248 debugchoice = {}
249 249
250 250 if cmd in table:
251 251 # short-circuit exact matches, "log" alias beats "^log|history"
252 252 keys = [cmd]
253 253 else:
254 254 keys = table.keys()
255 255
256 256 allcmds = []
257 257 for e in keys:
258 258 aliases = parsealiases(e)
259 259 allcmds.extend(aliases)
260 260 found = None
261 261 if cmd in aliases:
262 262 found = cmd
263 263 elif not strict:
264 264 for a in aliases:
265 265 if a.startswith(cmd):
266 266 found = a
267 267 break
268 268 if found is not None:
269 269 if aliases[0].startswith("debug") or found.startswith("debug"):
270 270 debugchoice[found] = (aliases, table[e])
271 271 else:
272 272 choice[found] = (aliases, table[e])
273 273
274 274 if not choice and debugchoice:
275 275 choice = debugchoice
276 276
277 277 return choice, allcmds
278 278
279 279 def findcmd(cmd, table, strict=True):
280 280 """Return (aliases, command table entry) for command string."""
281 281 choice, allcmds = findpossible(cmd, table, strict)
282 282
283 283 if cmd in choice:
284 284 return choice[cmd]
285 285
286 286 if len(choice) > 1:
287 287 clist = choice.keys()
288 288 clist.sort()
289 289 raise error.AmbiguousCommand(cmd, clist)
290 290
291 291 if choice:
292 292 return choice.values()[0]
293 293
294 294 raise error.UnknownCommand(cmd, allcmds)
295 295
296 296 def findrepo(p):
297 297 while not os.path.isdir(os.path.join(p, ".hg")):
298 298 oldp, p = p, os.path.dirname(p)
299 299 if p == oldp:
300 300 return None
301 301
302 302 return p
303 303
304 304 def bailifchanged(repo, merge=True):
305 305 if merge and repo.dirstate.p2() != nullid:
306 306 raise error.Abort(_('outstanding uncommitted merge'))
307 307 modified, added, removed, deleted = repo.status()[:4]
308 308 if modified or added or removed or deleted:
309 309 raise error.Abort(_('uncommitted changes'))
310 310 ctx = repo[None]
311 311 for s in sorted(ctx.substate):
312 312 ctx.sub(s).bailifchanged()
313 313
314 314 def logmessage(ui, opts):
315 315 """ get the log message according to the -m and -l options """
316 316 message = opts.get('message')
317 317 logfile = opts.get('logfile')
318 318
319 319 if message and logfile:
320 320 raise error.Abort(_('options --message and --logfile are mutually '
321 321 'exclusive'))
322 322 if not message and logfile:
323 323 try:
324 324 if logfile == '-':
325 325 message = ui.fin.read()
326 326 else:
327 327 message = '\n'.join(util.readfile(logfile).splitlines())
328 328 except IOError as inst:
329 329 raise error.Abort(_("can't read commit message '%s': %s") %
330 330 (logfile, inst.strerror))
331 331 return message
332 332
333 333 def mergeeditform(ctxorbool, baseformname):
334 334 """return appropriate editform name (referencing a committemplate)
335 335
336 336 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
337 337 a merge is being committed.
338 338
339 339 This returns baseformname with '.merge' appended if it is a merge,
340 340 otherwise '.normal' is appended.
341 341 """
342 342 if isinstance(ctxorbool, bool):
343 343 if ctxorbool:
344 344 return baseformname + ".merge"
345 345 elif 1 < len(ctxorbool.parents()):
346 346 return baseformname + ".merge"
347 347
348 348 return baseformname + ".normal"
349 349
350 350 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
351 351 editform='', **opts):
352 352 """get appropriate commit message editor according to '--edit' option
353 353
354 354 'finishdesc' is a function to be called with edited commit message
355 355 (= 'description' of the new changeset) just after editing, but
356 356 before checking empty-ness. It should return actual text to be
357 357 stored into history. This allows the description to be changed
358 358 before it is stored.
359 359
360 360 'extramsg' is an extra message to be shown in the editor instead of
361 361 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
362 362 are automatically added.
363 363
364 364 'editform' is a dot-separated list of names, to distinguish
365 365 the purpose of commit text editing.
366 366
367 367 'getcommiteditor' returns 'commitforceeditor' regardless of
368 368 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
369 369 they are specific for usage in MQ.
370 370 """
371 371 if edit or finishdesc or extramsg:
372 372 return lambda r, c, s: commitforceeditor(r, c, s,
373 373 finishdesc=finishdesc,
374 374 extramsg=extramsg,
375 375 editform=editform)
376 376 elif editform:
377 377 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
378 378 else:
379 379 return commiteditor
380 380
381 381 def loglimit(opts):
382 382 """get the log limit according to option -l/--limit"""
383 383 limit = opts.get('limit')
384 384 if limit:
385 385 try:
386 386 limit = int(limit)
387 387 except ValueError:
388 388 raise error.Abort(_('limit must be a positive integer'))
389 389 if limit <= 0:
390 390 raise error.Abort(_('limit must be positive'))
391 391 else:
392 392 limit = None
393 393 return limit
394 394
395 395 def makefilename(repo, pat, node, desc=None,
396 396 total=None, seqno=None, revwidth=None, pathname=None):
397 397 node_expander = {
398 398 'H': lambda: hex(node),
399 399 'R': lambda: str(repo.changelog.rev(node)),
400 400 'h': lambda: short(node),
401 401 'm': lambda: re.sub('[^\w]', '_', str(desc))
402 402 }
403 403 expander = {
404 404 '%': lambda: '%',
405 405 'b': lambda: os.path.basename(repo.root),
406 406 }
407 407
408 408 try:
409 409 if node:
410 410 expander.update(node_expander)
411 411 if node:
412 412 expander['r'] = (lambda:
413 413 str(repo.changelog.rev(node)).zfill(revwidth or 0))
414 414 if total is not None:
415 415 expander['N'] = lambda: str(total)
416 416 if seqno is not None:
417 417 expander['n'] = lambda: str(seqno)
418 418 if total is not None and seqno is not None:
419 419 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
420 420 if pathname is not None:
421 421 expander['s'] = lambda: os.path.basename(pathname)
422 422 expander['d'] = lambda: os.path.dirname(pathname) or '.'
423 423 expander['p'] = lambda: pathname
424 424
425 425 newname = []
426 426 patlen = len(pat)
427 427 i = 0
428 428 while i < patlen:
429 429 c = pat[i]
430 430 if c == '%':
431 431 i += 1
432 432 c = pat[i]
433 433 c = expander[c]()
434 434 newname.append(c)
435 435 i += 1
436 436 return ''.join(newname)
437 437 except KeyError as inst:
438 438 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
439 439 inst.args[0])
440 440
441 441 class _unclosablefile(object):
442 442 def __init__(self, fp):
443 443 self._fp = fp
444 444
445 445 def close(self):
446 446 pass
447 447
448 448 def __iter__(self):
449 449 return iter(self._fp)
450 450
451 451 def __getattr__(self, attr):
452 452 return getattr(self._fp, attr)
453 453
454 454 def makefileobj(repo, pat, node=None, desc=None, total=None,
455 455 seqno=None, revwidth=None, mode='wb', modemap=None,
456 456 pathname=None):
457 457
458 458 writable = mode not in ('r', 'rb')
459 459
460 460 if not pat or pat == '-':
461 461 if writable:
462 462 fp = repo.ui.fout
463 463 else:
464 464 fp = repo.ui.fin
465 465 return _unclosablefile(fp)
466 466 if util.safehasattr(pat, 'write') and writable:
467 467 return pat
468 468 if util.safehasattr(pat, 'read') and 'r' in mode:
469 469 return pat
470 470 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
471 471 if modemap is not None:
472 472 mode = modemap.get(fn, mode)
473 473 if mode == 'wb':
474 474 modemap[fn] = 'ab'
475 475 return open(fn, mode)
476 476
477 477 def openrevlog(repo, cmd, file_, opts):
478 478 """opens the changelog, manifest, a filelog or a given revlog"""
479 479 cl = opts['changelog']
480 480 mf = opts['manifest']
481 481 dir = opts['dir']
482 482 msg = None
483 483 if cl and mf:
484 484 msg = _('cannot specify --changelog and --manifest at the same time')
485 485 elif cl and dir:
486 486 msg = _('cannot specify --changelog and --dir at the same time')
487 487 elif cl or mf:
488 488 if file_:
489 489 msg = _('cannot specify filename with --changelog or --manifest')
490 490 elif not repo:
491 491 msg = _('cannot specify --changelog or --manifest or --dir '
492 492 'without a repository')
493 493 if msg:
494 494 raise error.Abort(msg)
495 495
496 496 r = None
497 497 if repo:
498 498 if cl:
499 499 r = repo.unfiltered().changelog
500 500 elif dir:
501 501 if 'treemanifest' not in repo.requirements:
502 502 raise error.Abort(_("--dir can only be used on repos with "
503 503 "treemanifest enabled"))
504 504 dirlog = repo.dirlog(file_)
505 505 if len(dirlog):
506 506 r = dirlog
507 507 elif mf:
508 508 r = repo.manifest
509 509 elif file_:
510 510 filelog = repo.file(file_)
511 511 if len(filelog):
512 512 r = filelog
513 513 if not r:
514 514 if not file_:
515 515 raise error.CommandError(cmd, _('invalid arguments'))
516 516 if not os.path.isfile(file_):
517 517 raise error.Abort(_("revlog '%s' not found") % file_)
518 518 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
519 519 file_[:-2] + ".i")
520 520 return r
521 521
522 522 def copy(ui, repo, pats, opts, rename=False):
523 523 # called with the repo lock held
524 524 #
525 525 # hgsep => pathname that uses "/" to separate directories
526 526 # ossep => pathname that uses os.sep to separate directories
527 527 cwd = repo.getcwd()
528 528 targets = {}
529 529 after = opts.get("after")
530 530 dryrun = opts.get("dry_run")
531 531 wctx = repo[None]
532 532
533 533 def walkpat(pat):
534 534 srcs = []
535 535 if after:
536 536 badstates = '?'
537 537 else:
538 538 badstates = '?r'
539 539 m = scmutil.match(repo[None], [pat], opts, globbed=True)
540 540 for abs in repo.walk(m):
541 541 state = repo.dirstate[abs]
542 542 rel = m.rel(abs)
543 543 exact = m.exact(abs)
544 544 if state in badstates:
545 545 if exact and state == '?':
546 546 ui.warn(_('%s: not copying - file is not managed\n') % rel)
547 547 if exact and state == 'r':
548 548 ui.warn(_('%s: not copying - file has been marked for'
549 549 ' remove\n') % rel)
550 550 continue
551 551 # abs: hgsep
552 552 # rel: ossep
553 553 srcs.append((abs, rel, exact))
554 554 return srcs
555 555
556 556 # abssrc: hgsep
557 557 # relsrc: ossep
558 558 # otarget: ossep
559 559 def copyfile(abssrc, relsrc, otarget, exact):
560 560 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
561 561 if '/' in abstarget:
562 562 # We cannot normalize abstarget itself, this would prevent
563 563 # case only renames, like a => A.
564 564 abspath, absname = abstarget.rsplit('/', 1)
565 565 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
566 566 reltarget = repo.pathto(abstarget, cwd)
567 567 target = repo.wjoin(abstarget)
568 568 src = repo.wjoin(abssrc)
569 569 state = repo.dirstate[abstarget]
570 570
571 571 scmutil.checkportable(ui, abstarget)
572 572
573 573 # check for collisions
574 574 prevsrc = targets.get(abstarget)
575 575 if prevsrc is not None:
576 576 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
577 577 (reltarget, repo.pathto(abssrc, cwd),
578 578 repo.pathto(prevsrc, cwd)))
579 579 return
580 580
581 581 # check for overwrites
582 582 exists = os.path.lexists(target)
583 583 samefile = False
584 584 if exists and abssrc != abstarget:
585 585 if (repo.dirstate.normalize(abssrc) ==
586 586 repo.dirstate.normalize(abstarget)):
587 587 if not rename:
588 588 ui.warn(_("%s: can't copy - same file\n") % reltarget)
589 589 return
590 590 exists = False
591 591 samefile = True
592 592
593 593 if not after and exists or after and state in 'mn':
594 594 if not opts['force']:
595 595 ui.warn(_('%s: not overwriting - file exists\n') %
596 596 reltarget)
597 597 return
598 598
599 599 if after:
600 600 if not exists:
601 601 if rename:
602 602 ui.warn(_('%s: not recording move - %s does not exist\n') %
603 603 (relsrc, reltarget))
604 604 else:
605 605 ui.warn(_('%s: not recording copy - %s does not exist\n') %
606 606 (relsrc, reltarget))
607 607 return
608 608 elif not dryrun:
609 609 try:
610 610 if exists:
611 611 os.unlink(target)
612 612 targetdir = os.path.dirname(target) or '.'
613 613 if not os.path.isdir(targetdir):
614 614 os.makedirs(targetdir)
615 615 if samefile:
616 616 tmp = target + "~hgrename"
617 617 os.rename(src, tmp)
618 618 os.rename(tmp, target)
619 619 else:
620 620 util.copyfile(src, target)
621 621 srcexists = True
622 622 except IOError as inst:
623 623 if inst.errno == errno.ENOENT:
624 624 ui.warn(_('%s: deleted in working directory\n') % relsrc)
625 625 srcexists = False
626 626 else:
627 627 ui.warn(_('%s: cannot copy - %s\n') %
628 628 (relsrc, inst.strerror))
629 629 return True # report a failure
630 630
631 631 if ui.verbose or not exact:
632 632 if rename:
633 633 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
634 634 else:
635 635 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
636 636
637 637 targets[abstarget] = abssrc
638 638
639 639 # fix up dirstate
640 640 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
641 641 dryrun=dryrun, cwd=cwd)
642 642 if rename and not dryrun:
643 643 if not after and srcexists and not samefile:
644 644 util.unlinkpath(repo.wjoin(abssrc))
645 645 wctx.forget([abssrc])
646 646
647 647 # pat: ossep
648 648 # dest ossep
649 649 # srcs: list of (hgsep, hgsep, ossep, bool)
650 650 # return: function that takes hgsep and returns ossep
651 651 def targetpathfn(pat, dest, srcs):
652 652 if os.path.isdir(pat):
653 653 abspfx = pathutil.canonpath(repo.root, cwd, pat)
654 654 abspfx = util.localpath(abspfx)
655 655 if destdirexists:
656 656 striplen = len(os.path.split(abspfx)[0])
657 657 else:
658 658 striplen = len(abspfx)
659 659 if striplen:
660 660 striplen += len(os.sep)
661 661 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
662 662 elif destdirexists:
663 663 res = lambda p: os.path.join(dest,
664 664 os.path.basename(util.localpath(p)))
665 665 else:
666 666 res = lambda p: dest
667 667 return res
668 668
669 669 # pat: ossep
670 670 # dest ossep
671 671 # srcs: list of (hgsep, hgsep, ossep, bool)
672 672 # return: function that takes hgsep and returns ossep
673 673 def targetpathafterfn(pat, dest, srcs):
674 674 if matchmod.patkind(pat):
675 675 # a mercurial pattern
676 676 res = lambda p: os.path.join(dest,
677 677 os.path.basename(util.localpath(p)))
678 678 else:
679 679 abspfx = pathutil.canonpath(repo.root, cwd, pat)
680 680 if len(abspfx) < len(srcs[0][0]):
681 681 # A directory. Either the target path contains the last
682 682 # component of the source path or it does not.
683 683 def evalpath(striplen):
684 684 score = 0
685 685 for s in srcs:
686 686 t = os.path.join(dest, util.localpath(s[0])[striplen:])
687 687 if os.path.lexists(t):
688 688 score += 1
689 689 return score
690 690
691 691 abspfx = util.localpath(abspfx)
692 692 striplen = len(abspfx)
693 693 if striplen:
694 694 striplen += len(os.sep)
695 695 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
696 696 score = evalpath(striplen)
697 697 striplen1 = len(os.path.split(abspfx)[0])
698 698 if striplen1:
699 699 striplen1 += len(os.sep)
700 700 if evalpath(striplen1) > score:
701 701 striplen = striplen1
702 702 res = lambda p: os.path.join(dest,
703 703 util.localpath(p)[striplen:])
704 704 else:
705 705 # a file
706 706 if destdirexists:
707 707 res = lambda p: os.path.join(dest,
708 708 os.path.basename(util.localpath(p)))
709 709 else:
710 710 res = lambda p: dest
711 711 return res
712 712
713 713 pats = scmutil.expandpats(pats)
714 714 if not pats:
715 715 raise error.Abort(_('no source or destination specified'))
716 716 if len(pats) == 1:
717 717 raise error.Abort(_('no destination specified'))
718 718 dest = pats.pop()
719 719 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
720 720 if not destdirexists:
721 721 if len(pats) > 1 or matchmod.patkind(pats[0]):
722 722 raise error.Abort(_('with multiple sources, destination must be an '
723 723 'existing directory'))
724 724 if util.endswithsep(dest):
725 725 raise error.Abort(_('destination %s is not a directory') % dest)
726 726
727 727 tfn = targetpathfn
728 728 if after:
729 729 tfn = targetpathafterfn
730 730 copylist = []
731 731 for pat in pats:
732 732 srcs = walkpat(pat)
733 733 if not srcs:
734 734 continue
735 735 copylist.append((tfn(pat, dest, srcs), srcs))
736 736 if not copylist:
737 737 raise error.Abort(_('no files to copy'))
738 738
739 739 errors = 0
740 740 for targetpath, srcs in copylist:
741 741 for abssrc, relsrc, exact in srcs:
742 742 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
743 743 errors += 1
744 744
745 745 if errors:
746 746 ui.warn(_('(consider using --after)\n'))
747 747
748 748 return errors != 0
749 749
750 750 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
751 751 runargs=None, appendpid=False):
752 752 '''Run a command as a service.'''
753 753
754 754 def writepid(pid):
755 755 if opts['pid_file']:
756 756 if appendpid:
757 757 mode = 'a'
758 758 else:
759 759 mode = 'w'
760 760 fp = open(opts['pid_file'], mode)
761 761 fp.write(str(pid) + '\n')
762 762 fp.close()
763 763
764 764 if opts['daemon'] and not opts['daemon_pipefds']:
765 765 # Signal child process startup with file removal
766 766 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
767 767 os.close(lockfd)
768 768 try:
769 769 if not runargs:
770 770 runargs = util.hgcmd() + sys.argv[1:]
771 771 runargs.append('--daemon-pipefds=%s' % lockpath)
772 772 # Don't pass --cwd to the child process, because we've already
773 773 # changed directory.
774 774 for i in xrange(1, len(runargs)):
775 775 if runargs[i].startswith('--cwd='):
776 776 del runargs[i]
777 777 break
778 778 elif runargs[i].startswith('--cwd'):
779 779 del runargs[i:i + 2]
780 780 break
781 781 def condfn():
782 782 return not os.path.exists(lockpath)
783 783 pid = util.rundetached(runargs, condfn)
784 784 if pid < 0:
785 785 raise error.Abort(_('child process failed to start'))
786 786 writepid(pid)
787 787 finally:
788 788 try:
789 789 os.unlink(lockpath)
790 790 except OSError as e:
791 791 if e.errno != errno.ENOENT:
792 792 raise
793 793 if parentfn:
794 794 return parentfn(pid)
795 795 else:
796 796 return
797 797
798 798 if initfn:
799 799 initfn()
800 800
801 801 if not opts['daemon']:
802 802 writepid(os.getpid())
803 803
804 804 if opts['daemon_pipefds']:
805 805 lockpath = opts['daemon_pipefds']
806 806 try:
807 807 os.setsid()
808 808 except AttributeError:
809 809 pass
810 810 os.unlink(lockpath)
811 811 util.hidewindow()
812 812 sys.stdout.flush()
813 813 sys.stderr.flush()
814 814
815 815 nullfd = os.open(os.devnull, os.O_RDWR)
816 816 logfilefd = nullfd
817 817 if logfile:
818 818 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
819 819 os.dup2(nullfd, 0)
820 820 os.dup2(logfilefd, 1)
821 821 os.dup2(logfilefd, 2)
822 822 if nullfd not in (0, 1, 2):
823 823 os.close(nullfd)
824 824 if logfile and logfilefd not in (0, 1, 2):
825 825 os.close(logfilefd)
826 826
827 827 if runfn:
828 828 return runfn()
829 829
830 830 ## facility to let extensions process additional data into an import patch
831 831 # list of identifiers to be executed in order
832 832 extrapreimport = [] # run before commit
833 833 extrapostimport = [] # run after commit
834 834 # mapping from identifier to actual import function
835 835 #
836 836 # 'preimport' are run before the commit is made and are provided the following
837 837 # arguments:
838 838 # - repo: the localrepository instance,
839 839 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
840 840 # - extra: the future extra dictionary of the changeset, please mutate it,
841 841 # - opts: the import options.
842 842 # XXX ideally, we would just pass a ctx ready to be computed, that would allow
843 843 # mutation of in memory commit and more. Feel free to rework the code to get
844 844 # there.
845 845 extrapreimportmap = {}
846 846 # 'postimport' are run after the commit is made and are provided the following
847 847 # argument:
848 848 # - ctx: the changectx created by import.
849 849 extrapostimportmap = {}
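# Illustrative sketch (not part of this change): an extension could hook the
# import machinery above roughly as follows; the identifier and function name
# are invented for the example.
#
#   def recordimportsource(repo, patchdata, extra, opts):
#       # mutate 'extra' so the resulting changeset records where it came from
#       extra['import_source'] = patchdata.get('nodeid', '')
#
#   extrapreimport.append('importsource')
#   extrapreimportmap['importsource'] = recordimportsource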
850 850
851 851 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
852 852 """Utility function used by commands.import to import a single patch
853 853
854 854 This function is explicitly defined here to help the evolve extension to
855 855 wrap this part of the import logic.
856 856
857 857 The API is currently a bit ugly because it is a simple code translation from
858 858 the import command. Feel free to make it better.
859 859
860 860 :hunk: a patch (as a binary string)
861 861 :parents: nodes that will be parent of the created commit
862 862 :opts: the full dict of options passed to the import command
863 863 :msgs: list to save commit message to.
864 864 (used in case we need to save it when failing)
865 865 :updatefunc: a function that updates a repo to a given node
866 866 updatefunc(<repo>, <node>)
867 867 """
868 868 # avoid cycle context -> subrepo -> cmdutil
869 869 import context
870 870 extractdata = patch.extract(ui, hunk)
871 871 tmpname = extractdata.get('filename')
872 872 message = extractdata.get('message')
873 873 user = opts.get('user') or extractdata.get('user')
874 874 date = opts.get('date') or extractdata.get('date')
875 875 branch = extractdata.get('branch')
876 876 nodeid = extractdata.get('nodeid')
877 877 p1 = extractdata.get('p1')
878 878 p2 = extractdata.get('p2')
879 879
880 880 nocommit = opts.get('no_commit')
881 881 importbranch = opts.get('import_branch')
882 882 update = not opts.get('bypass')
883 883 strip = opts["strip"]
884 884 prefix = opts["prefix"]
885 885 sim = float(opts.get('similarity') or 0)
886 886 if not tmpname:
887 887 return (None, None, False)
888 888
889 889 rejects = False
890 890
891 891 try:
892 892 cmdline_message = logmessage(ui, opts)
893 893 if cmdline_message:
894 894 # pickup the cmdline msg
895 895 message = cmdline_message
896 896 elif message:
897 897 # pickup the patch msg
898 898 message = message.strip()
899 899 else:
900 900 # launch the editor
901 901 message = None
902 902 ui.debug('message:\n%s\n' % message)
903 903
904 904 if len(parents) == 1:
905 905 parents.append(repo[nullid])
906 906 if opts.get('exact'):
907 907 if not nodeid or not p1:
908 908 raise error.Abort(_('not a Mercurial patch'))
909 909 p1 = repo[p1]
910 910 p2 = repo[p2 or nullid]
911 911 elif p2:
912 912 try:
913 913 p1 = repo[p1]
914 914 p2 = repo[p2]
915 915 # Without any options, consider p2 only if the
916 916 # patch is being applied on top of the recorded
917 917 # first parent.
918 918 if p1 != parents[0]:
919 919 p1 = parents[0]
920 920 p2 = repo[nullid]
921 921 except error.RepoError:
922 922 p1, p2 = parents
923 923 if p2.node() == nullid:
924 924 ui.warn(_("warning: import the patch as a normal revision\n"
925 925 "(use --exact to import the patch as a merge)\n"))
926 926 else:
927 927 p1, p2 = parents
928 928
929 929 n = None
930 930 if update:
931 931 if p1 != parents[0]:
932 932 updatefunc(repo, p1.node())
933 933 if p2 != parents[1]:
934 934 repo.setparents(p1.node(), p2.node())
935 935
936 936 if opts.get('exact') or importbranch:
937 937 repo.dirstate.setbranch(branch or 'default')
938 938
939 939 partial = opts.get('partial', False)
940 940 files = set()
941 941 try:
942 942 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
943 943 files=files, eolmode=None, similarity=sim / 100.0)
944 944 except patch.PatchError as e:
945 945 if not partial:
946 946 raise error.Abort(str(e))
947 947 if partial:
948 948 rejects = True
949 949
950 950 files = list(files)
951 951 if nocommit:
952 952 if message:
953 953 msgs.append(message)
954 954 else:
955 955 if opts.get('exact') or p2:
956 956 # If you got here, you either use --force and know what
957 957 # you are doing or used --exact or a merge patch while
958 958 # being updated to its first parent.
959 959 m = None
960 960 else:
961 961 m = scmutil.matchfiles(repo, files or [])
962 962 editform = mergeeditform(repo[None], 'import.normal')
963 963 if opts.get('exact'):
964 964 editor = None
965 965 else:
966 966 editor = getcommiteditor(editform=editform, **opts)
967 967 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
968 968 extra = {}
969 969 for idfunc in extrapreimport:
970 970 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
971 971 try:
972 972 if partial:
973 973 repo.ui.setconfig('ui', 'allowemptycommit', True)
974 974 n = repo.commit(message, user,
975 975 date, match=m,
976 976 editor=editor, extra=extra)
977 977 for idfunc in extrapostimport:
978 978 extrapostimportmap[idfunc](repo[n])
979 979 finally:
980 980 repo.ui.restoreconfig(allowemptyback)
981 981 else:
982 982 if opts.get('exact') or importbranch:
983 983 branch = branch or 'default'
984 984 else:
985 985 branch = p1.branch()
986 986 store = patch.filestore()
987 987 try:
988 988 files = set()
989 989 try:
990 990 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
991 991 files, eolmode=None)
992 992 except patch.PatchError as e:
993 993 raise error.Abort(str(e))
994 994 if opts.get('exact'):
995 995 editor = None
996 996 else:
997 997 editor = getcommiteditor(editform='import.bypass')
998 998 memctx = context.makememctx(repo, (p1.node(), p2.node()),
999 999 message,
1000 1000 user,
1001 1001 date,
1002 1002 branch, files, store,
1003 1003 editor=editor)
1004 1004 n = memctx.commit()
1005 1005 finally:
1006 1006 store.close()
1007 1007 if opts.get('exact') and nocommit:
1008 1008 # --exact with --no-commit is still useful in that it does merge
1009 1009 # and branch bits
1010 1010 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1011 1011 elif opts.get('exact') and hex(n) != nodeid:
1012 1012 raise error.Abort(_('patch is damaged or loses information'))
1013 1013 msg = _('applied to working directory')
1014 1014 if n:
1015 1015 # i18n: refers to a short changeset id
1016 1016 msg = _('created %s') % short(n)
1017 1017 return (msg, n, rejects)
1018 1018 finally:
1019 1019 os.unlink(tmpname)
1020 1020
1021 1021 # facility to let extensions include additional data in an exported patch
1022 1022 # list of identifiers to be executed in order
1023 1023 extraexport = []
1024 1024 # mapping from identifier to actual export function
1025 1025 # function has to return a string to be added to the header or None
1026 1026 # it is given two arguments (sequencenumber, changectx)
1027 1027 extraexportmap = {}
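# Illustrative sketch (not part of this change): an extension could add a
# header line to every exported patch roughly as follows; the identifier and
# function name are invented for the example.
#
#   def exportbranchheader(seqno, ctx):
#       # returning None suppresses the extra header for this changeset
#       return 'Exported-branch: %s' % ctx.branch()
#
#   extraexport.append('exportbranch')
#   extraexportmap['exportbranch'] = exportbranchheader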
1028 1028
1029 1029 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1030 1030 opts=None, match=None):
1031 1031 '''export changesets as hg patches.'''
1032 1032
1033 1033 total = len(revs)
1034 1034 revwidth = max([len(str(rev)) for rev in revs])
1035 1035 filemode = {}
1036 1036
1037 1037 def single(rev, seqno, fp):
1038 1038 ctx = repo[rev]
1039 1039 node = ctx.node()
1040 1040 parents = [p.node() for p in ctx.parents() if p]
1041 1041 branch = ctx.branch()
1042 1042 if switch_parent:
1043 1043 parents.reverse()
1044 1044
1045 1045 if parents:
1046 1046 prev = parents[0]
1047 1047 else:
1048 1048 prev = nullid
1049 1049
1050 1050 shouldclose = False
1051 1051 if not fp and len(template) > 0:
1052 1052 desc_lines = ctx.description().rstrip().split('\n')
1053 1053 desc = desc_lines[0] #Commit always has a first line.
1054 1054 fp = makefileobj(repo, template, node, desc=desc, total=total,
1055 1055 seqno=seqno, revwidth=revwidth, mode='wb',
1056 1056 modemap=filemode)
1057 1057 shouldclose = True
1058 1058 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1059 1059 repo.ui.note("%s\n" % fp.name)
1060 1060
1061 1061 if not fp:
1062 1062 write = repo.ui.write
1063 1063 else:
1064 1064 def write(s, **kw):
1065 1065 fp.write(s)
1066 1066
1067 1067 write("# HG changeset patch\n")
1068 1068 write("# User %s\n" % ctx.user())
1069 1069 write("# Date %d %d\n" % ctx.date())
1070 1070 write("# %s\n" % util.datestr(ctx.date()))
1071 1071 if branch and branch != 'default':
1072 1072 write("# Branch %s\n" % branch)
1073 1073 write("# Node ID %s\n" % hex(node))
1074 1074 write("# Parent %s\n" % hex(prev))
1075 1075 if len(parents) > 1:
1076 1076 write("# Parent %s\n" % hex(parents[1]))
1077 1077
1078 1078 for headerid in extraexport:
1079 1079 header = extraexportmap[headerid](seqno, ctx)
1080 1080 if header is not None:
1081 1081 write('# %s\n' % header)
1082 1082 write(ctx.description().rstrip())
1083 1083 write("\n\n")
1084 1084
1085 1085 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1086 1086 write(chunk, label=label)
1087 1087
1088 1088 if shouldclose:
1089 1089 fp.close()
1090 1090
1091 1091 for seqno, rev in enumerate(revs):
1092 1092 single(rev, seqno + 1, fp)
1093 1093
1094 1094 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1095 1095 changes=None, stat=False, fp=None, prefix='',
1096 1096 root='', listsubrepos=False):
1097 1097 '''show diff or diffstat.'''
1098 1098 if fp is None:
1099 1099 write = ui.write
1100 1100 else:
1101 1101 def write(s, **kw):
1102 1102 fp.write(s)
1103 1103
1104 1104 if root:
1105 1105 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1106 1106 else:
1107 1107 relroot = ''
1108 1108 if relroot != '':
1109 1109 # XXX relative roots currently don't work if the root is within a
1110 1110 # subrepo
1111 1111 uirelroot = match.uipath(relroot)
1112 1112 relroot += '/'
1113 1113 for matchroot in match.files():
1114 1114 if not matchroot.startswith(relroot):
1115 1115 ui.warn(_('warning: %s not inside relative root %s\n') % (
1116 1116 match.uipath(matchroot), uirelroot))
1117 1117
1118 1118 if stat:
1119 1119 diffopts = diffopts.copy(context=0)
1120 1120 width = 80
1121 1121 if not ui.plain():
1122 1122 width = ui.termwidth()
1123 1123 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1124 1124 prefix=prefix, relroot=relroot)
1125 1125 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1126 1126 width=width,
1127 1127 git=diffopts.git):
1128 1128 write(chunk, label=label)
1129 1129 else:
1130 1130 for chunk, label in patch.diffui(repo, node1, node2, match,
1131 1131 changes, diffopts, prefix=prefix,
1132 1132 relroot=relroot):
1133 1133 write(chunk, label=label)
1134 1134
1135 1135 if listsubrepos:
1136 1136 ctx1 = repo[node1]
1137 1137 ctx2 = repo[node2]
1138 1138 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1139 1139 tempnode2 = node2
1140 1140 try:
1141 1141 if node2 is not None:
1142 1142 tempnode2 = ctx2.substate[subpath][1]
1143 1143 except KeyError:
1144 1144 # A subrepo that existed in node1 was deleted between node1 and
1145 1145 # node2 (inclusive). Thus, ctx2's substate won't contain that
1146 1146 # subpath. The best we can do is to ignore it.
1147 1147 tempnode2 = None
1148 1148 submatch = matchmod.narrowmatcher(subpath, match)
1149 1149 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1150 1150 stat=stat, fp=fp, prefix=prefix)
1151 1151
1152 1152 class changeset_printer(object):
1153 1153 '''show changeset information when templating is not requested.'''
1154 1154
1155 1155 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1156 1156 self.ui = ui
1157 1157 self.repo = repo
1158 1158 self.buffered = buffered
1159 1159 self.matchfn = matchfn
1160 1160 self.diffopts = diffopts
1161 1161 self.header = {}
1162 1162 self.hunk = {}
1163 1163 self.lastheader = None
1164 1164 self.footer = None
1165 1165
1166 1166 def flush(self, ctx):
1167 1167 rev = ctx.rev()
1168 1168 if rev in self.header:
1169 1169 h = self.header[rev]
1170 1170 if h != self.lastheader:
1171 1171 self.lastheader = h
1172 1172 self.ui.write(h)
1173 1173 del self.header[rev]
1174 1174 if rev in self.hunk:
1175 1175 self.ui.write(self.hunk[rev])
1176 1176 del self.hunk[rev]
1177 1177 return 1
1178 1178 return 0
1179 1179
1180 1180 def close(self):
1181 1181 if self.footer:
1182 1182 self.ui.write(self.footer)
1183 1183
1184 1184 def show(self, ctx, copies=None, matchfn=None, **props):
1185 1185 if self.buffered:
1186 1186 self.ui.pushbuffer(labeled=True)
1187 1187 self._show(ctx, copies, matchfn, props)
1188 1188 self.hunk[ctx.rev()] = self.ui.popbuffer()
1189 1189 else:
1190 1190 self._show(ctx, copies, matchfn, props)
1191 1191
1192 1192 def _show(self, ctx, copies, matchfn, props):
1193 1193 '''show a single changeset or file revision'''
1194 1194 changenode = ctx.node()
1195 1195 rev = ctx.rev()
1196 1196 if self.ui.debugflag:
1197 1197 hexfunc = hex
1198 1198 else:
1199 1199 hexfunc = short
1200 1200 # as of now, wctx.node() and wctx.rev() return None, but we want to
1201 1201 # show the same values as {node} and {rev} templatekw
1202 1202 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1203 1203
1204 1204 if self.ui.quiet:
1205 1205 self.ui.write("%d:%s\n" % revnode, label='log.node')
1206 1206 return
1207 1207
1208 1208 date = util.datestr(ctx.date())
1209 1209
1210 1210 # i18n: column positioning for "hg log"
1211 1211 self.ui.write(_("changeset: %d:%s\n") % revnode,
1212 1212 label='log.changeset changeset.%s' % ctx.phasestr())
1213 1213
1214 1214 # branches are shown first before any other names due to backwards
1215 1215 # compatibility
1216 1216 branch = ctx.branch()
1217 1217 # don't show the default branch name
1218 1218 if branch != 'default':
1219 1219 # i18n: column positioning for "hg log"
1220 1220 self.ui.write(_("branch: %s\n") % branch,
1221 1221 label='log.branch')
1222 1222
1223 1223 for name, ns in self.repo.names.iteritems():
1224 1224 # branches has special logic already handled above, so here we just
1225 1225 # skip it
1226 1226 if name == 'branches':
1227 1227 continue
1228 1228 # we will use the templatename as the color name since those two
1229 1229 # should be the same
1230 1230 for name in ns.names(self.repo, changenode):
1231 1231 self.ui.write(ns.logfmt % name,
1232 1232 label='log.%s' % ns.colorname)
1233 1233 if self.ui.debugflag:
1234 1234 # i18n: column positioning for "hg log"
1235 1235 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1236 1236 label='log.phase')
1237 1237 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1238 1238 label = 'log.parent changeset.%s' % pctx.phasestr()
1239 1239 # i18n: column positioning for "hg log"
1240 1240 self.ui.write(_("parent: %d:%s\n")
1241 1241 % (pctx.rev(), hexfunc(pctx.node())),
1242 1242 label=label)
1243 1243
1244 1244 if self.ui.debugflag and rev is not None:
1245 1245 mnode = ctx.manifestnode()
1246 1246 # i18n: column positioning for "hg log"
1247 1247 self.ui.write(_("manifest: %d:%s\n") %
1248 1248 (self.repo.manifest.rev(mnode), hex(mnode)),
1249 1249 label='ui.debug log.manifest')
1250 1250 # i18n: column positioning for "hg log"
1251 1251 self.ui.write(_("user: %s\n") % ctx.user(),
1252 1252 label='log.user')
1253 1253 # i18n: column positioning for "hg log"
1254 1254 self.ui.write(_("date: %s\n") % date,
1255 1255 label='log.date')
1256 1256
1257 1257 if self.ui.debugflag:
1258 1258 files = ctx.p1().status(ctx)[:3]
1259 1259 for key, value in zip([# i18n: column positioning for "hg log"
1260 1260 _("files:"),
1261 1261 # i18n: column positioning for "hg log"
1262 1262 _("files+:"),
1263 1263 # i18n: column positioning for "hg log"
1264 1264 _("files-:")], files):
1265 1265 if value:
1266 1266 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1267 1267 label='ui.debug log.files')
1268 1268 elif ctx.files() and self.ui.verbose:
1269 1269 # i18n: column positioning for "hg log"
1270 1270 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1271 1271 label='ui.note log.files')
1272 1272 if copies and self.ui.verbose:
1273 1273 copies = ['%s (%s)' % c for c in copies]
1274 1274 # i18n: column positioning for "hg log"
1275 1275 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1276 1276 label='ui.note log.copies')
1277 1277
1278 1278 extra = ctx.extra()
1279 1279 if extra and self.ui.debugflag:
1280 1280 for key, value in sorted(extra.items()):
1281 1281 # i18n: column positioning for "hg log"
1282 1282 self.ui.write(_("extra: %s=%s\n")
1283 1283 % (key, value.encode('string_escape')),
1284 1284 label='ui.debug log.extra')
1285 1285
1286 1286 description = ctx.description().strip()
1287 1287 if description:
1288 1288 if self.ui.verbose:
1289 1289 self.ui.write(_("description:\n"),
1290 1290 label='ui.note log.description')
1291 1291 self.ui.write(description,
1292 1292 label='ui.note log.description')
1293 1293 self.ui.write("\n\n")
1294 1294 else:
1295 1295 # i18n: column positioning for "hg log"
1296 1296 self.ui.write(_("summary: %s\n") %
1297 1297 description.splitlines()[0],
1298 1298 label='log.summary')
1299 1299 self.ui.write("\n")
1300 1300
1301 1301 self.showpatch(ctx, matchfn)
1302 1302
1303 1303 def showpatch(self, ctx, matchfn):
1304 1304 if not matchfn:
1305 1305 matchfn = self.matchfn
1306 1306 if matchfn:
1307 1307 stat = self.diffopts.get('stat')
1308 1308 diff = self.diffopts.get('patch')
1309 1309 diffopts = patch.diffallopts(self.ui, self.diffopts)
1310 1310 node = ctx.node()
1311 prev = ctx.p1()
1311 prev = ctx.p1().node()
1312 1312 if stat:
1313 1313 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1314 1314 match=matchfn, stat=True)
1315 1315 if diff:
1316 1316 if stat:
1317 1317 self.ui.write("\n")
1318 1318 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1319 1319 match=matchfn, stat=False)
1320 1320 self.ui.write("\n")
1321 1321
1322 1322 class jsonchangeset(changeset_printer):
1323 1323 '''format changeset information.'''
1324 1324
1325 1325 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1326 1326 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1327 1327 self.cache = {}
1328 1328 self._first = True
1329 1329
1330 1330 def close(self):
1331 1331 if not self._first:
1332 1332 self.ui.write("\n]\n")
1333 1333 else:
1334 1334 self.ui.write("[]\n")
1335 1335
1336 1336 def _show(self, ctx, copies, matchfn, props):
1337 1337 '''show a single changeset or file revision'''
1338 1338 rev = ctx.rev()
1339 1339 if rev is None:
1340 1340 jrev = jnode = 'null'
1341 1341 else:
1342 1342 jrev = str(rev)
1343 1343 jnode = '"%s"' % hex(ctx.node())
1344 1344 j = encoding.jsonescape
1345 1345
1346 1346 if self._first:
1347 1347 self.ui.write("[\n {")
1348 1348 self._first = False
1349 1349 else:
1350 1350 self.ui.write(",\n {")
1351 1351
1352 1352 if self.ui.quiet:
1353 1353 self.ui.write('\n "rev": %s' % jrev)
1354 1354 self.ui.write(',\n "node": %s' % jnode)
1355 1355 self.ui.write('\n }')
1356 1356 return
1357 1357
1358 1358 self.ui.write('\n "rev": %s' % jrev)
1359 1359 self.ui.write(',\n "node": %s' % jnode)
1360 1360 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1361 1361 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1362 1362 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1363 1363 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1364 1364 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1365 1365
1366 1366 self.ui.write(',\n "bookmarks": [%s]' %
1367 1367 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1368 1368 self.ui.write(',\n "tags": [%s]' %
1369 1369 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1370 1370 self.ui.write(',\n "parents": [%s]' %
1371 1371 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1372 1372
1373 1373 if self.ui.debugflag:
1374 1374 if rev is None:
1375 1375 jmanifestnode = 'null'
1376 1376 else:
1377 1377 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1378 1378 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1379 1379
1380 1380 self.ui.write(',\n "extra": {%s}' %
1381 1381 ", ".join('"%s": "%s"' % (j(k), j(v))
1382 1382 for k, v in ctx.extra().items()))
1383 1383
1384 1384 files = ctx.p1().status(ctx)
1385 1385 self.ui.write(',\n "modified": [%s]' %
1386 1386 ", ".join('"%s"' % j(f) for f in files[0]))
1387 1387 self.ui.write(',\n "added": [%s]' %
1388 1388 ", ".join('"%s"' % j(f) for f in files[1]))
1389 1389 self.ui.write(',\n "removed": [%s]' %
1390 1390 ", ".join('"%s"' % j(f) for f in files[2]))
1391 1391
1392 1392 elif self.ui.verbose:
1393 1393 self.ui.write(',\n "files": [%s]' %
1394 1394 ", ".join('"%s"' % j(f) for f in ctx.files()))
1395 1395
1396 1396 if copies:
1397 1397 self.ui.write(',\n "copies": {%s}' %
1398 1398 ", ".join('"%s": "%s"' % (j(k), j(v))
1399 1399 for k, v in copies))
1400 1400
1401 1401 matchfn = self.matchfn
1402 1402 if matchfn:
1403 1403 stat = self.diffopts.get('stat')
1404 1404 diff = self.diffopts.get('patch')
1405 1405 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1406 1406 node, prev = ctx.node(), ctx.p1().node()
1407 1407 if stat:
1408 1408 self.ui.pushbuffer()
1409 1409 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1410 1410 match=matchfn, stat=True)
1411 1411 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1412 1412 if diff:
1413 1413 self.ui.pushbuffer()
1414 1414 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1415 1415 match=matchfn, stat=False)
1416 1416 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1417 1417
1418 1418 self.ui.write("\n }")
1419 1419
1420 1420 class changeset_templater(changeset_printer):
1421 1421 '''format changeset information.'''
1422 1422
1423 1423 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1424 1424 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1425 1425 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1426 1426 defaulttempl = {
1427 1427 'parent': '{rev}:{node|formatnode} ',
1428 1428 'manifest': '{rev}:{node|formatnode}',
1429 1429 'file_copy': '{name} ({source})',
1430 1430 'extra': '{key}={value|stringescape}'
1431 1431 }
1432 1432 # filecopy is preserved for compatibility reasons
1433 1433 defaulttempl['filecopy'] = defaulttempl['file_copy']
1434 1434 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1435 1435 cache=defaulttempl)
1436 1436 if tmpl:
1437 1437 self.t.cache['changeset'] = tmpl
1438 1438
1439 1439 self.cache = {}
1440 1440
1441 1441 # find correct templates for current mode
1442 1442 tmplmodes = [
1443 1443 (True, None),
1444 1444 (self.ui.verbose, 'verbose'),
1445 1445 (self.ui.quiet, 'quiet'),
1446 1446 (self.ui.debugflag, 'debug'),
1447 1447 ]
1448 1448
1449 1449 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1450 1450 'docheader': '', 'docfooter': ''}
1451 1451 for mode, postfix in tmplmodes:
1452 1452 for t in self._parts:
1453 1453 cur = t
1454 1454 if postfix:
1455 1455 cur += "_" + postfix
1456 1456 if mode and cur in self.t:
1457 1457 self._parts[t] = cur
1458 1458
1459 1459 if self._parts['docheader']:
1460 1460 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1461 1461
1462 1462 def close(self):
1463 1463 if self._parts['docfooter']:
1464 1464 if not self.footer:
1465 1465 self.footer = ""
1466 1466 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1467 1467 return super(changeset_templater, self).close()
1468 1468
1469 1469 def _show(self, ctx, copies, matchfn, props):
1470 1470 '''show a single changeset or file revision'''
1471 1471 props = props.copy()
1472 1472 props.update(templatekw.keywords)
1473 1473 props['templ'] = self.t
1474 1474 props['ctx'] = ctx
1475 1475 props['repo'] = self.repo
1476 1476 props['revcache'] = {'copies': copies}
1477 1477 props['cache'] = self.cache
1478 1478
1479 1479 try:
1480 1480 # write header
1481 1481 if self._parts['header']:
1482 1482 h = templater.stringify(self.t(self._parts['header'], **props))
1483 1483 if self.buffered:
1484 1484 self.header[ctx.rev()] = h
1485 1485 else:
1486 1486 if self.lastheader != h:
1487 1487 self.lastheader = h
1488 1488 self.ui.write(h)
1489 1489
1490 1490 # write changeset metadata, then patch if requested
1491 1491 key = self._parts['changeset']
1492 1492 self.ui.write(templater.stringify(self.t(key, **props)))
1493 1493 self.showpatch(ctx, matchfn)
1494 1494
1495 1495 if self._parts['footer']:
1496 1496 if not self.footer:
1497 1497 self.footer = templater.stringify(
1498 1498 self.t(self._parts['footer'], **props))
1499 1499 except KeyError as inst:
1500 1500 msg = _("%s: no key named '%s'")
1501 1501 raise error.Abort(msg % (self.t.mapfile, inst.args[0]))
1502 1502 except SyntaxError as inst:
1503 1503 raise error.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1504 1504
1505 1505 def gettemplate(ui, tmpl, style):
1506 1506 """
1507 1507 Find the template matching the given template spec or style.
1508 1508 """
1509 1509
1510 1510 # ui settings
1511 1511 if not tmpl and not style: # template is stronger than style
1512 1512 tmpl = ui.config('ui', 'logtemplate')
1513 1513 if tmpl:
1514 1514 try:
1515 1515 tmpl = templater.unquotestring(tmpl)
1516 1516 except SyntaxError:
1517 1517 pass
1518 1518 return tmpl, None
1519 1519 else:
1520 1520 style = util.expandpath(ui.config('ui', 'style', ''))
1521 1521
1522 1522 if not tmpl and style:
1523 1523 mapfile = style
1524 1524 if not os.path.split(mapfile)[0]:
1525 1525 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1526 1526 or templater.templatepath(mapfile))
1527 1527 if mapname:
1528 1528 mapfile = mapname
1529 1529 return None, mapfile
1530 1530
1531 1531 if not tmpl:
1532 1532 return None, None
1533 1533
1534 1534 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1535 1535
1536 1536 def show_changeset(ui, repo, opts, buffered=False):
1537 1537 """show one changeset using template or regular display.
1538 1538
1539 1539 Display format will be the first non-empty hit of:
1540 1540 1. option 'template'
1541 1541 2. option 'style'
1542 1542 3. [ui] setting 'logtemplate'
1543 1543 4. [ui] setting 'style'
1544 1544 If all of these values are either unset or the empty string,
1545 1545 regular display via changeset_printer() is done.
1546 1546 """
1547 1547 # options
1548 1548 matchfn = None
1549 1549 if opts.get('patch') or opts.get('stat'):
1550 1550 matchfn = scmutil.matchall(repo)
1551 1551
1552 1552 if opts.get('template') == 'json':
1553 1553 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1554 1554
1555 1555 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1556 1556
1557 1557 if not tmpl and not mapfile:
1558 1558 return changeset_printer(ui, repo, matchfn, opts, buffered)
1559 1559
1560 1560 try:
1561 1561 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1562 1562 buffered)
1563 1563 except SyntaxError as inst:
1564 1564 raise error.Abort(inst.args[0])
1565 1565 return t
1566 1566
1567 1567 def showmarker(ui, marker):
1568 1568 """utility function to display obsolescence marker in a readable way
1569 1569
1570 1570 To be used by debug function."""
1571 1571 ui.write(hex(marker.precnode()))
1572 1572 for repl in marker.succnodes():
1573 1573 ui.write(' ')
1574 1574 ui.write(hex(repl))
1575 1575 ui.write(' %X ' % marker.flags())
1576 1576 parents = marker.parentnodes()
1577 1577 if parents is not None:
1578 1578 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1579 1579 ui.write('(%s) ' % util.datestr(marker.date()))
1580 1580 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1581 1581 sorted(marker.metadata().items())
1582 1582 if t[0] != 'date')))
1583 1583 ui.write('\n')
1584 1584
1585 1585 def finddate(ui, repo, date):
1586 1586 """Find the tipmost changeset that matches the given date spec"""
1587 1587
1588 1588 df = util.matchdate(date)
1589 1589 m = scmutil.matchall(repo)
1590 1590 results = {}
1591 1591
1592 1592 def prep(ctx, fns):
1593 1593 d = ctx.date()
1594 1594 if df(d[0]):
1595 1595 results[ctx.rev()] = d
1596 1596
1597 1597 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1598 1598 rev = ctx.rev()
1599 1599 if rev in results:
1600 1600 ui.status(_("found revision %s from %s\n") %
1601 1601 (rev, util.datestr(results[rev])))
1602 1602 return str(rev)
1603 1603
1604 1604 raise error.Abort(_("revision matching date not found"))
1605 1605
1606 1606 def increasingwindows(windowsize=8, sizelimit=512):
1607 1607 while True:
1608 1608 yield windowsize
1609 1609 if windowsize < sizelimit:
1610 1610 windowsize *= 2
1611 1611
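# A short illustration of the windowing behaviour with the defaults above:
# the window doubles until it reaches sizelimit and then stays there.
#
#   sizes = increasingwindows()
#   [next(sizes) for _ in range(8)]  # -> [8, 16, 32, 64, 128, 256, 512, 512]
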
1612 1612 class FileWalkError(Exception):
1613 1613 pass
1614 1614
1615 1615 def walkfilerevs(repo, match, follow, revs, fncache):
1616 1616 '''Walks the file history for the matched files.
1617 1617
1618 1618 Returns the changeset revs that are involved in the file history.
1619 1619
1620 1620 Throws FileWalkError if the file history can't be walked using
1621 1621 filelogs alone.
1622 1622 '''
1623 1623 wanted = set()
1624 1624 copies = []
1625 1625 minrev, maxrev = min(revs), max(revs)
1626 1626 def filerevgen(filelog, last):
1627 1627 """
1628 1628 Only files, no patterns. Check the history of each file.
1629 1629
1630 1630 Examines filelog entries within the minrev, maxrev linkrev range.
1631 1631 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1632 1632 tuples in backwards order
1633 1633 """
1634 1634 cl_count = len(repo)
1635 1635 revs = []
1636 1636 for j in xrange(0, last + 1):
1637 1637 linkrev = filelog.linkrev(j)
1638 1638 if linkrev < minrev:
1639 1639 continue
1640 1640 # only yield revs for which we have the changelog; it can
1641 1641 # happen while doing "hg log" during a pull or commit
1642 1642 if linkrev >= cl_count:
1643 1643 break
1644 1644
1645 1645 parentlinkrevs = []
1646 1646 for p in filelog.parentrevs(j):
1647 1647 if p != nullrev:
1648 1648 parentlinkrevs.append(filelog.linkrev(p))
1649 1649 n = filelog.node(j)
1650 1650 revs.append((linkrev, parentlinkrevs,
1651 1651 follow and filelog.renamed(n)))
1652 1652
1653 1653 return reversed(revs)
1654 1654 def iterfiles():
1655 1655 pctx = repo['.']
1656 1656 for filename in match.files():
1657 1657 if follow:
1658 1658 if filename not in pctx:
1659 1659 raise error.Abort(_('cannot follow file not in parent '
1660 1660 'revision: "%s"') % filename)
1661 1661 yield filename, pctx[filename].filenode()
1662 1662 else:
1663 1663 yield filename, None
1664 1664 for filename_node in copies:
1665 1665 yield filename_node
1666 1666
1667 1667 for file_, node in iterfiles():
1668 1668 filelog = repo.file(file_)
1669 1669 if not len(filelog):
1670 1670 if node is None:
1671 1671 # A zero count may be a directory or deleted file, so
1672 1672 # try to find matching entries on the slow path.
1673 1673 if follow:
1674 1674 raise error.Abort(
1675 1675 _('cannot follow nonexistent file: "%s"') % file_)
1676 1676 raise FileWalkError("Cannot walk via filelog")
1677 1677 else:
1678 1678 continue
1679 1679
1680 1680 if node is None:
1681 1681 last = len(filelog) - 1
1682 1682 else:
1683 1683 last = filelog.rev(node)
1684 1684
1685 1685 # keep track of all ancestors of the file
1686 1686 ancestors = set([filelog.linkrev(last)])
1687 1687
1688 1688 # iterate from latest to oldest revision
1689 1689 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1690 1690 if not follow:
1691 1691 if rev > maxrev:
1692 1692 continue
1693 1693 else:
1694 1694 # Note that last might not be the first interesting
1695 1695 # rev to us:
1696 1696 # if the file has been changed after maxrev, we'll
1697 1697 # have linkrev(last) > maxrev, and we still need
1698 1698 # to explore the file graph
1699 1699 if rev not in ancestors:
1700 1700 continue
1701 1701 # XXX insert 1327 fix here
1702 1702 if flparentlinkrevs:
1703 1703 ancestors.update(flparentlinkrevs)
1704 1704
1705 1705 fncache.setdefault(rev, []).append(file_)
1706 1706 wanted.add(rev)
1707 1707 if copied:
1708 1708 copies.append(copied)
1709 1709
1710 1710 return wanted
1711 1711
1712 1712 class _followfilter(object):
1713 1713 def __init__(self, repo, onlyfirst=False):
1714 1714 self.repo = repo
1715 1715 self.startrev = nullrev
1716 1716 self.roots = set()
1717 1717 self.onlyfirst = onlyfirst
1718 1718
1719 1719 def match(self, rev):
1720 1720 def realparents(rev):
1721 1721 if self.onlyfirst:
1722 1722 return self.repo.changelog.parentrevs(rev)[0:1]
1723 1723 else:
1724 1724 return filter(lambda x: x != nullrev,
1725 1725 self.repo.changelog.parentrevs(rev))
1726 1726
1727 1727 if self.startrev == nullrev:
1728 1728 self.startrev = rev
1729 1729 return True
1730 1730
1731 1731 if rev > self.startrev:
1732 1732 # forward: all descendants
1733 1733 if not self.roots:
1734 1734 self.roots.add(self.startrev)
1735 1735 for parent in realparents(rev):
1736 1736 if parent in self.roots:
1737 1737 self.roots.add(rev)
1738 1738 return True
1739 1739 else:
1740 1740 # backwards: all parents
1741 1741 if not self.roots:
1742 1742 self.roots.update(realparents(self.startrev))
1743 1743 if rev in self.roots:
1744 1744 self.roots.remove(rev)
1745 1745 self.roots.update(realparents(rev))
1746 1746 return True
1747 1747
1748 1748 return False
1749 1749
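# A rough sketch of how the filter above is driven, assuming a `repo` object
# and a starting revision `startrev`; feeding revisions in descending order
# makes match() select the ancestors of the first revision it sees
# (including that revision itself):
#
#   ff = _followfilter(repo)
#   ancestors = [r for r in xrange(startrev, -1, -1) if ff.match(r)]
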
1750 1750 def walkchangerevs(repo, match, opts, prepare):
1751 1751 '''Iterate over files and the revs in which they changed.
1752 1752
1753 1753 Callers most commonly need to iterate backwards over the history
1754 1754 in which they are interested. Doing so has awful (quadratic-looking)
1755 1755 performance, so we use iterators in a "windowed" way.
1756 1756
1757 1757 We walk a window of revisions in the desired order. Within the
1758 1758 window, we first walk forwards to gather data, then in the desired
1759 1759 order (usually backwards) to display it.
1760 1760
1761 1761 This function returns an iterator yielding contexts. Before
1762 1762 yielding each context, the iterator will first call the prepare
1763 1763 function on each context in the window in forward order.'''
1764 1764
1765 1765 follow = opts.get('follow') or opts.get('follow_first')
1766 1766 revs = _logrevs(repo, opts)
1767 1767 if not revs:
1768 1768 return []
1769 1769 wanted = set()
1770 1770 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1771 1771 opts.get('removed'))
1772 1772 fncache = {}
1773 1773 change = repo.changectx
1774 1774
1775 1775 # First step is to fill wanted, the set of revisions that we want to yield.
1776 1776 # When it does not induce extra cost, we also fill fncache for revisions in
1777 1777 # wanted: a cache of filenames that were changed (ctx.files()) and that
1778 1778 # match the file filtering conditions.
1779 1779
1780 1780 if match.always():
1781 1781 # No files, no patterns. Display all revs.
1782 1782 wanted = revs
1783 1783 elif not slowpath:
1784 1784 # We only have to read through the filelog to find wanted revisions
1785 1785
1786 1786 try:
1787 1787 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1788 1788 except FileWalkError:
1789 1789 slowpath = True
1790 1790
1791 1791 # We decided to fall back to the slowpath because at least one
1792 1792 # of the paths was not a file. Check to see if at least one of them
1793 1793 # existed in history, otherwise simply return
1794 1794 for path in match.files():
1795 1795 if path == '.' or path in repo.store:
1796 1796 break
1797 1797 else:
1798 1798 return []
1799 1799
1800 1800 if slowpath:
1801 1801 # We have to read the changelog to match filenames against
1802 1802 # changed files
1803 1803
1804 1804 if follow:
1805 1805 raise error.Abort(_('can only follow copies/renames for explicit '
1806 1806 'filenames'))
1807 1807
1808 1808 # The slow path checks files modified in every changeset.
1809 1809 # This is really slow on large repos, so compute the set lazily.
1810 1810 class lazywantedset(object):
1811 1811 def __init__(self):
1812 1812 self.set = set()
1813 1813 self.revs = set(revs)
1814 1814
1815 1815 # No need to worry about locality here because it will be accessed
1816 1816 # in the same order as the increasing window below.
1817 1817 def __contains__(self, value):
1818 1818 if value in self.set:
1819 1819 return True
1820 1820 elif not value in self.revs:
1821 1821 return False
1822 1822 else:
1823 1823 self.revs.discard(value)
1824 1824 ctx = change(value)
1825 1825 matches = filter(match, ctx.files())
1826 1826 if matches:
1827 1827 fncache[value] = matches
1828 1828 self.set.add(value)
1829 1829 return True
1830 1830 return False
1831 1831
1832 1832 def discard(self, value):
1833 1833 self.revs.discard(value)
1834 1834 self.set.discard(value)
1835 1835
1836 1836 wanted = lazywantedset()
1837 1837
1838 1838 # it might be worthwhile to do this in the iterator if the rev range
1839 1839 # is descending and the prune args are all within that range
1840 1840 for rev in opts.get('prune', ()):
1841 1841 rev = repo[rev].rev()
1842 1842 ff = _followfilter(repo)
1843 1843 stop = min(revs[0], revs[-1])
1844 1844 for x in xrange(rev, stop - 1, -1):
1845 1845 if ff.match(x):
1846 1846 wanted = wanted - [x]
1847 1847
1848 1848 # Now that wanted is correctly initialized, we can iterate over the
1849 1849 # revision range, yielding only revisions in wanted.
1850 1850 def iterate():
1851 1851 if follow and match.always():
1852 1852 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1853 1853 def want(rev):
1854 1854 return ff.match(rev) and rev in wanted
1855 1855 else:
1856 1856 def want(rev):
1857 1857 return rev in wanted
1858 1858
1859 1859 it = iter(revs)
1860 1860 stopiteration = False
1861 1861 for windowsize in increasingwindows():
1862 1862 nrevs = []
1863 1863 for i in xrange(windowsize):
1864 1864 rev = next(it, None)
1865 1865 if rev is None:
1866 1866 stopiteration = True
1867 1867 break
1868 1868 elif want(rev):
1869 1869 nrevs.append(rev)
1870 1870 for rev in sorted(nrevs):
1871 1871 fns = fncache.get(rev)
1872 1872 ctx = change(rev)
1873 1873 if not fns:
1874 1874 def fns_generator():
1875 1875 for f in ctx.files():
1876 1876 if match(f):
1877 1877 yield f
1878 1878 fns = fns_generator()
1879 1879 prepare(ctx, fns)
1880 1880 for rev in nrevs:
1881 1881 yield change(rev)
1882 1882
1883 1883 if stopiteration:
1884 1884 break
1885 1885
1886 1886 return iterate()
1887 1887
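# A condensed sketch of the calling pattern, assuming a `repo`, a matcher `m`
# and a log-style opts dict; `prep` runs on every context in a window before
# the contexts themselves are yielded (compare finddate() above):
#
#   def prep(ctx, fns):
#       pass  # gather per-revision data here
#
#   for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
#       pass  # display ctx, using whatever prep() collected
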
1888 1888 def _makefollowlogfilematcher(repo, files, followfirst):
1889 1889 # When displaying a revision with --patch --follow FILE, we have
1890 1890 # to know which file of the revision must be diffed. With
1891 1891 # --follow, we want the names of the ancestors of FILE in the
1892 1892 # revision, stored in "fcache". "fcache" is populated by
1893 1893 # reproducing the graph traversal already done by --follow revset
1894 1894 # and relating linkrevs to file names (which is not "correct" but
1895 1895 # good enough).
1896 1896 fcache = {}
1897 1897 fcacheready = [False]
1898 1898 pctx = repo['.']
1899 1899
1900 1900 def populate():
1901 1901 for fn in files:
1902 1902 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1903 1903 for c in i:
1904 1904 fcache.setdefault(c.linkrev(), set()).add(c.path())
1905 1905
1906 1906 def filematcher(rev):
1907 1907 if not fcacheready[0]:
1908 1908 # Lazy initialization
1909 1909 fcacheready[0] = True
1910 1910 populate()
1911 1911 return scmutil.matchfiles(repo, fcache.get(rev, []))
1912 1912
1913 1913 return filematcher
1914 1914
1915 1915 def _makenofollowlogfilematcher(repo, pats, opts):
1916 1916 '''hook for extensions to override the filematcher for non-follow cases'''
1917 1917 return None
1918 1918
1919 1919 def _makelogrevset(repo, pats, opts, revs):
1920 1920 """Return (expr, filematcher) where expr is a revset string built
1921 1921 from log options and file patterns or None. If --stat or --patch
1922 1922 are not passed, filematcher is None. Otherwise it is a callable
1923 1923 taking a revision number and returning a match object filtering
1924 1924 the files to be detailed when displaying the revision.
1925 1925 """
1926 1926 opt2revset = {
1927 1927 'no_merges': ('not merge()', None),
1928 1928 'only_merges': ('merge()', None),
1929 1929 '_ancestors': ('ancestors(%(val)s)', None),
1930 1930 '_fancestors': ('_firstancestors(%(val)s)', None),
1931 1931 '_descendants': ('descendants(%(val)s)', None),
1932 1932 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1933 1933 '_matchfiles': ('_matchfiles(%(val)s)', None),
1934 1934 'date': ('date(%(val)r)', None),
1935 1935 'branch': ('branch(%(val)r)', ' or '),
1936 1936 '_patslog': ('filelog(%(val)r)', ' or '),
1937 1937 '_patsfollow': ('follow(%(val)r)', ' or '),
1938 1938 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1939 1939 'keyword': ('keyword(%(val)r)', ' or '),
1940 1940 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1941 1941 'user': ('user(%(val)r)', ' or '),
1942 1942 }
1943 1943
1944 1944 opts = dict(opts)
1945 1945 # follow or not follow?
1946 1946 follow = opts.get('follow') or opts.get('follow_first')
1947 1947 if opts.get('follow_first'):
1948 1948 followfirst = 1
1949 1949 else:
1950 1950 followfirst = 0
1951 1951 # --follow with FILE behavior depends on revs...
1952 1952 it = iter(revs)
1953 1953 startrev = it.next()
1954 1954 followdescendants = startrev < next(it, startrev)
1955 1955
1956 1956 # branch and only_branch are really aliases and must be handled at
1957 1957 # the same time
1958 1958 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1959 1959 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1960 1960 # pats/include/exclude are passed to match.match() directly in
1961 1961 # _matchfiles() revset but walkchangerevs() builds its matcher with
1962 1962 # scmutil.match(). The difference is input pats are globbed on
1963 1963 # platforms without shell expansion (windows).
1964 1964 wctx = repo[None]
1965 1965 match, pats = scmutil.matchandpats(wctx, pats, opts)
1966 1966 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1967 1967 opts.get('removed'))
1968 1968 if not slowpath:
1969 1969 for f in match.files():
1970 1970 if follow and f not in wctx:
1971 1971 # If the file exists, it may be a directory, so let it
1972 1972 # take the slow path.
1973 1973 if os.path.exists(repo.wjoin(f)):
1974 1974 slowpath = True
1975 1975 continue
1976 1976 else:
1977 1977 raise error.Abort(_('cannot follow file not in parent '
1978 1978 'revision: "%s"') % f)
1979 1979 filelog = repo.file(f)
1980 1980 if not filelog:
1981 1981 # A zero count may be a directory or deleted file, so
1982 1982 # try to find matching entries on the slow path.
1983 1983 if follow:
1984 1984 raise error.Abort(
1985 1985 _('cannot follow nonexistent file: "%s"') % f)
1986 1986 slowpath = True
1987 1987
1988 1988 # We decided to fall back to the slowpath because at least one
1989 1989 # of the paths was not a file. Check to see if at least one of them
1990 1990 # existed in history - in that case, we'll continue down the
1991 1991 # slowpath; otherwise, we can turn off the slowpath
1992 1992 if slowpath:
1993 1993 for path in match.files():
1994 1994 if path == '.' or path in repo.store:
1995 1995 break
1996 1996 else:
1997 1997 slowpath = False
1998 1998
1999 1999 fpats = ('_patsfollow', '_patsfollowfirst')
2000 2000 fnopats = (('_ancestors', '_fancestors'),
2001 2001 ('_descendants', '_fdescendants'))
2002 2002 if slowpath:
2003 2003 # See walkchangerevs() slow path.
2004 2004 #
2005 2005 # pats/include/exclude cannot be represented as separate
2006 2006 # revset expressions as their filtering logic applies at file
2007 2007 # level. For instance "-I a -X a" matches a revision touching
2008 2008 # "a" and "b" while "file(a) and not file(b)" does
2009 2009 # not. Besides, filesets are evaluated against the working
2010 2010 # directory.
2011 2011 matchargs = ['r:', 'd:relpath']
2012 2012 for p in pats:
2013 2013 matchargs.append('p:' + p)
2014 2014 for p in opts.get('include', []):
2015 2015 matchargs.append('i:' + p)
2016 2016 for p in opts.get('exclude', []):
2017 2017 matchargs.append('x:' + p)
2018 2018 matchargs = ','.join(('%r' % p) for p in matchargs)
2019 2019 opts['_matchfiles'] = matchargs
2020 2020 if follow:
2021 2021 opts[fnopats[0][followfirst]] = '.'
2022 2022 else:
2023 2023 if follow:
2024 2024 if pats:
2025 2025 # follow() revset interprets its file argument as a
2026 2026 # manifest entry, so use match.files(), not pats.
2027 2027 opts[fpats[followfirst]] = list(match.files())
2028 2028 else:
2029 2029 op = fnopats[followdescendants][followfirst]
2030 2030 opts[op] = 'rev(%d)' % startrev
2031 2031 else:
2032 2032 opts['_patslog'] = list(pats)
2033 2033
2034 2034 filematcher = None
2035 2035 if opts.get('patch') or opts.get('stat'):
2036 2036 # When following files, track renames via a special matcher.
2037 2037 # If we're forced to take the slowpath it means we're following
2038 2038 # at least one pattern/directory, so don't bother with rename tracking.
2039 2039 if follow and not match.always() and not slowpath:
2040 2040 # _makefollowlogfilematcher expects its files argument to be
2041 2041 # relative to the repo root, so use match.files(), not pats.
2042 2042 filematcher = _makefollowlogfilematcher(repo, match.files(),
2043 2043 followfirst)
2044 2044 else:
2045 2045 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2046 2046 if filematcher is None:
2047 2047 filematcher = lambda rev: match
2048 2048
2049 2049 expr = []
2050 2050 for op, val in sorted(opts.iteritems()):
2051 2051 if not val:
2052 2052 continue
2053 2053 if op not in opt2revset:
2054 2054 continue
2055 2055 revop, andor = opt2revset[op]
2056 2056 if '%(val)' not in revop:
2057 2057 expr.append(revop)
2058 2058 else:
2059 2059 if not isinstance(val, list):
2060 2060 e = revop % {'val': val}
2061 2061 else:
2062 2062 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2063 2063 expr.append(e)
2064 2064
2065 2065 if expr:
2066 2066 expr = '(' + ' and '.join(expr) + ')'
2067 2067 else:
2068 2068 expr = None
2069 2069 return expr, filematcher
2070 2070
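# For illustration, a stripped-down invocation roughly equivalent to
# "hg log -k bug -u alice" (no file patterns), assuming `repo` and a
# non-empty `revs` from _logrevs(); the option values are examples only:
#
#   expr, filematcher = _makelogrevset(repo, (), {'keyword': ['bug'],
#                                                 'user': ['alice']}, revs)
#   # expr ~ "((keyword('bug')) and (user('alice')))", filematcher is None
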
2071 2071 def _logrevs(repo, opts):
2072 2072 # Default --rev value depends on --follow but --follow behavior
2073 2073 # depends on revisions resolved from --rev...
2074 2074 follow = opts.get('follow') or opts.get('follow_first')
2075 2075 if opts.get('rev'):
2076 2076 revs = scmutil.revrange(repo, opts['rev'])
2077 2077 elif follow and repo.dirstate.p1() == nullid:
2078 2078 revs = revset.baseset()
2079 2079 elif follow:
2080 2080 revs = repo.revs('reverse(:.)')
2081 2081 else:
2082 2082 revs = revset.spanset(repo)
2083 2083 revs.reverse()
2084 2084 return revs
2085 2085
2086 2086 def getgraphlogrevs(repo, pats, opts):
2087 2087 """Return (revs, expr, filematcher) where revs is an iterable of
2088 2088 revision numbers, expr is a revset string built from log options
2089 2089 and file patterns or None, and used to filter 'revs'. If --stat or
2090 2090 --patch are not passed, filematcher is None. Otherwise it is a
2091 2091 callable taking a revision number and returning a match object
2092 2092 filtering the files to be detailed when displaying the revision.
2093 2093 """
2094 2094 limit = loglimit(opts)
2095 2095 revs = _logrevs(repo, opts)
2096 2096 if not revs:
2097 2097 return revset.baseset(), None, None
2098 2098 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2099 2099 if opts.get('rev'):
2100 2100 # User-specified revs might be unsorted, but don't sort before
2101 2101 # _makelogrevset because it might depend on the order of revs
2102 2102 revs.sort(reverse=True)
2103 2103 if expr:
2104 2104 # Revset matchers often operate faster on revisions in changelog
2105 2105 # order, because most filters deal with the changelog.
2106 2106 revs.reverse()
2107 2107 matcher = revset.match(repo.ui, expr)
2108 2108 # Revset matches can reorder revisions. "A or B" typically returns
2109 2109 # the revision matching A then the revision matching B. Sort
2110 2110 # again to fix that.
2111 2111 revs = matcher(repo, revs)
2112 2112 revs.sort(reverse=True)
2113 2113 if limit is not None:
2114 2114 limitedrevs = []
2115 2115 for idx, rev in enumerate(revs):
2116 2116 if idx >= limit:
2117 2117 break
2118 2118 limitedrevs.append(rev)
2119 2119 revs = revset.baseset(limitedrevs)
2120 2120
2121 2121 return revs, expr, filematcher
2122 2122
2123 2123 def getlogrevs(repo, pats, opts):
2124 2124 """Return (revs, expr, filematcher) where revs is an iterable of
2125 2125 revision numbers, expr is a revset string built from log options
2126 2126 and file patterns or None, and used to filter 'revs'. If --stat or
2127 2127 --patch are not passed, filematcher is None. Otherwise it is a
2128 2128 callable taking a revision number and returning a match object
2129 2129 filtering the files to be detailed when displaying the revision.
2130 2130 """
2131 2131 limit = loglimit(opts)
2132 2132 revs = _logrevs(repo, opts)
2133 2133 if not revs:
2134 2134 return revset.baseset([]), None, None
2135 2135 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2136 2136 if expr:
2137 2137 # Revset matchers often operate faster on revisions in changelog
2138 2138 # order, because most filters deal with the changelog.
2139 2139 if not opts.get('rev'):
2140 2140 revs.reverse()
2141 2141 matcher = revset.match(repo.ui, expr)
2142 2142 # Revset matches can reorder revisions. "A or B" typically returns
2143 2143 # the revision matching A then the revision matching B. Sort
2144 2144 # again to fix that.
2145 2145 revs = matcher(repo, revs)
2146 2146 if not opts.get('rev'):
2147 2147 revs.sort(reverse=True)
2148 2148 if limit is not None:
2149 2149 limitedrevs = []
2150 2150 for idx, r in enumerate(revs):
2151 2151 if limit <= idx:
2152 2152 break
2153 2153 limitedrevs.append(r)
2154 2154 revs = revset.baseset(limitedrevs)
2155 2155
2156 2156 return revs, expr, filematcher
2157 2157
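# A compact sketch of how a log-style command might consume the triple
# returned above, assuming `ui`, `repo`, `pats` and `opts` are in scope:
#
#   revs, expr, filematcher = getlogrevs(repo, pats, opts)
#   displayer = show_changeset(ui, repo, opts)
#   for rev in revs:
#       ctx = repo[rev]
#       matchfn = filematcher(rev) if filematcher else None
#       displayer.show(ctx, matchfn=matchfn)
#   displayer.close()
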
2158 2158 def _graphnodeformatter(ui, displayer):
2159 2159 spec = ui.config('ui', 'graphnodetemplate')
2160 2160 if not spec:
2161 2161 return templatekw.showgraphnode # fast path for "{graphnode}"
2162 2162
2163 2163 templ = formatter.gettemplater(ui, 'graphnode', spec)
2164 2164 cache = {}
2165 2165 if isinstance(displayer, changeset_templater):
2166 2166 cache = displayer.cache # reuse cache of slow templates
2167 2167 props = templatekw.keywords.copy()
2168 2168 props['templ'] = templ
2169 2169 props['cache'] = cache
2170 2170 def formatnode(repo, ctx):
2171 2171 props['ctx'] = ctx
2172 2172 props['repo'] = repo
2173 2173 props['revcache'] = {}
2174 2174 return templater.stringify(templ('graphnode', **props))
2175 2175 return formatnode
2176 2176
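# A small configuration sketch for the template consulted above; with an
# entry like this in hgrc, the working directory parent is drawn as "@" and
# every other node as "o" (mirroring the usual graph characters):
#
#   [ui]
#   graphnodetemplate = {ifcontains(rev, revset('.'), "@", "o")}
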
2177 2177 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2178 2178 filematcher=None):
2179 2179 formatnode = _graphnodeformatter(ui, displayer)
2180 2180 seen, state = [], graphmod.asciistate()
2181 2181 for rev, type, ctx, parents in dag:
2182 2182 char = formatnode(repo, ctx)
2183 2183 copies = None
2184 2184 if getrenamed and ctx.rev():
2185 2185 copies = []
2186 2186 for fn in ctx.files():
2187 2187 rename = getrenamed(fn, ctx.rev())
2188 2188 if rename:
2189 2189 copies.append((fn, rename[0]))
2190 2190 revmatchfn = None
2191 2191 if filematcher is not None:
2192 2192 revmatchfn = filematcher(ctx.rev())
2193 2193 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2194 2194 lines = displayer.hunk.pop(rev).split('\n')
2195 2195 if not lines[-1]:
2196 2196 del lines[-1]
2197 2197 displayer.flush(ctx)
2198 2198 edges = edgefn(type, char, lines, seen, rev, parents)
2199 2199 for type, char, lines, coldata in edges:
2200 2200 graphmod.ascii(ui, state, type, char, lines, coldata)
2201 2201 displayer.close()
2202 2202
2203 2203 def graphlog(ui, repo, *pats, **opts):
2204 2204 # Parameters are identical to log command ones
2205 2205 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2206 2206 revdag = graphmod.dagwalker(repo, revs)
2207 2207
2208 2208 getrenamed = None
2209 2209 if opts.get('copies'):
2210 2210 endrev = None
2211 2211 if opts.get('rev'):
2212 2212 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2213 2213 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2214 2214 displayer = show_changeset(ui, repo, opts, buffered=True)
2215 2215 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2216 2216 filematcher)
2217 2217
2218 2218 def checkunsupportedgraphflags(pats, opts):
2219 2219 for op in ["newest_first"]:
2220 2220 if op in opts and opts[op]:
2221 2221 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2222 2222 % op.replace("_", "-"))
2223 2223
2224 2224 def graphrevs(repo, nodes, opts):
2225 2225 limit = loglimit(opts)
2226 2226 nodes.reverse()
2227 2227 if limit is not None:
2228 2228 nodes = nodes[:limit]
2229 2229 return graphmod.nodes(repo, nodes)
2230 2230
2231 2231 def add(ui, repo, match, prefix, explicitonly, **opts):
2232 2232 join = lambda f: os.path.join(prefix, f)
2233 2233 bad = []
2234 2234
2235 2235 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2236 2236 names = []
2237 2237 wctx = repo[None]
2238 2238 cca = None
2239 2239 abort, warn = scmutil.checkportabilityalert(ui)
2240 2240 if abort or warn:
2241 2241 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2242 2242
2243 2243 badmatch = matchmod.badmatch(match, badfn)
2244 2244 dirstate = repo.dirstate
2245 2245 # We don't want to just call wctx.walk here, since it would return a lot of
2246 2246 # clean files, which we aren't interested in, and that takes time.
2247 2247 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2248 2248 True, False, full=False)):
2249 2249 exact = match.exact(f)
2250 2250 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2251 2251 if cca:
2252 2252 cca(f)
2253 2253 names.append(f)
2254 2254 if ui.verbose or not exact:
2255 2255 ui.status(_('adding %s\n') % match.rel(f))
2256 2256
2257 2257 for subpath in sorted(wctx.substate):
2258 2258 sub = wctx.sub(subpath)
2259 2259 try:
2260 2260 submatch = matchmod.narrowmatcher(subpath, match)
2261 2261 if opts.get('subrepos'):
2262 2262 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2263 2263 else:
2264 2264 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2265 2265 except error.LookupError:
2266 2266 ui.status(_("skipping missing subrepository: %s\n")
2267 2267 % join(subpath))
2268 2268
2269 2269 if not opts.get('dry_run'):
2270 2270 rejected = wctx.add(names, prefix)
2271 2271 bad.extend(f for f in rejected if f in match.files())
2272 2272 return bad
2273 2273
2274 2274 def forget(ui, repo, match, prefix, explicitonly):
2275 2275 join = lambda f: os.path.join(prefix, f)
2276 2276 bad = []
2277 2277 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2278 2278 wctx = repo[None]
2279 2279 forgot = []
2280 2280
2281 2281 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2282 2282 forget = sorted(s[0] + s[1] + s[3] + s[6])
2283 2283 if explicitonly:
2284 2284 forget = [f for f in forget if match.exact(f)]
2285 2285
2286 2286 for subpath in sorted(wctx.substate):
2287 2287 sub = wctx.sub(subpath)
2288 2288 try:
2289 2289 submatch = matchmod.narrowmatcher(subpath, match)
2290 2290 subbad, subforgot = sub.forget(submatch, prefix)
2291 2291 bad.extend([subpath + '/' + f for f in subbad])
2292 2292 forgot.extend([subpath + '/' + f for f in subforgot])
2293 2293 except error.LookupError:
2294 2294 ui.status(_("skipping missing subrepository: %s\n")
2295 2295 % join(subpath))
2296 2296
2297 2297 if not explicitonly:
2298 2298 for f in match.files():
2299 2299 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2300 2300 if f not in forgot:
2301 2301 if repo.wvfs.exists(f):
2302 2302 # Don't complain if the exact case match wasn't given.
2303 2303 # But don't do this until after checking 'forgot', so
2304 2304 # that subrepo files aren't normalized, and this op is
2305 2305 # purely from data cached by the status walk above.
2306 2306 if repo.dirstate.normalize(f) in repo.dirstate:
2307 2307 continue
2308 2308 ui.warn(_('not removing %s: '
2309 2309 'file is already untracked\n')
2310 2310 % match.rel(f))
2311 2311 bad.append(f)
2312 2312
2313 2313 for f in forget:
2314 2314 if ui.verbose or not match.exact(f):
2315 2315 ui.status(_('removing %s\n') % match.rel(f))
2316 2316
2317 2317 rejected = wctx.forget(forget, prefix)
2318 2318 bad.extend(f for f in rejected if f in match.files())
2319 2319 forgot.extend(f for f in forget if f not in rejected)
2320 2320 return bad, forgot
2321 2321
2322 2322 def files(ui, ctx, m, fm, fmt, subrepos):
2323 2323 rev = ctx.rev()
2324 2324 ret = 1
2325 2325 ds = ctx.repo().dirstate
2326 2326
2327 2327 for f in ctx.matches(m):
2328 2328 if rev is None and ds[f] == 'r':
2329 2329 continue
2330 2330 fm.startitem()
2331 2331 if ui.verbose:
2332 2332 fc = ctx[f]
2333 2333 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2334 2334 fm.data(abspath=f)
2335 2335 fm.write('path', fmt, m.rel(f))
2336 2336 ret = 0
2337 2337
2338 2338 for subpath in sorted(ctx.substate):
2339 2339 def matchessubrepo(subpath):
2340 2340 return (m.always() or m.exact(subpath)
2341 2341 or any(f.startswith(subpath + '/') for f in m.files()))
2342 2342
2343 2343 if subrepos or matchessubrepo(subpath):
2344 2344 sub = ctx.sub(subpath)
2345 2345 try:
2346 2346 submatch = matchmod.narrowmatcher(subpath, m)
2347 2347 if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
2348 2348 ret = 0
2349 2349 except error.LookupError:
2350 2350 ui.status(_("skipping missing subrepository: %s\n")
2351 2351 % m.abs(subpath))
2352 2352
2353 2353 return ret
2354 2354
2355 2355 def remove(ui, repo, m, prefix, after, force, subrepos):
2356 2356 join = lambda f: os.path.join(prefix, f)
2357 2357 ret = 0
2358 2358 s = repo.status(match=m, clean=True)
2359 2359 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2360 2360
2361 2361 wctx = repo[None]
2362 2362
2363 2363 for subpath in sorted(wctx.substate):
2364 2364 def matchessubrepo(matcher, subpath):
2365 2365 if matcher.exact(subpath):
2366 2366 return True
2367 2367 for f in matcher.files():
2368 2368 if f.startswith(subpath):
2369 2369 return True
2370 2370 return False
2371 2371
2372 2372 if subrepos or matchessubrepo(m, subpath):
2373 2373 sub = wctx.sub(subpath)
2374 2374 try:
2375 2375 submatch = matchmod.narrowmatcher(subpath, m)
2376 2376 if sub.removefiles(submatch, prefix, after, force, subrepos):
2377 2377 ret = 1
2378 2378 except error.LookupError:
2379 2379 ui.status(_("skipping missing subrepository: %s\n")
2380 2380 % join(subpath))
2381 2381
2382 2382 # warn about failure to delete explicit files/dirs
2383 2383 deleteddirs = util.dirs(deleted)
2384 2384 for f in m.files():
2385 2385 def insubrepo():
2386 2386 for subpath in wctx.substate:
2387 2387 if f.startswith(subpath):
2388 2388 return True
2389 2389 return False
2390 2390
2391 2391 isdir = f in deleteddirs or wctx.hasdir(f)
2392 2392 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2393 2393 continue
2394 2394
2395 2395 if repo.wvfs.exists(f):
2396 2396 if repo.wvfs.isdir(f):
2397 2397 ui.warn(_('not removing %s: no tracked files\n')
2398 2398 % m.rel(f))
2399 2399 else:
2400 2400 ui.warn(_('not removing %s: file is untracked\n')
2401 2401 % m.rel(f))
2402 2402 # missing files will generate a warning elsewhere
2403 2403 ret = 1
2404 2404
2405 2405 if force:
2406 2406 list = modified + deleted + clean + added
2407 2407 elif after:
2408 2408 list = deleted
2409 2409 for f in modified + added + clean:
2410 2410 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2411 2411 ret = 1
2412 2412 else:
2413 2413 list = deleted + clean
2414 2414 for f in modified:
2415 2415 ui.warn(_('not removing %s: file is modified (use -f'
2416 2416 ' to force removal)\n') % m.rel(f))
2417 2417 ret = 1
2418 2418 for f in added:
2419 2419 ui.warn(_('not removing %s: file has been marked for add'
2420 2420 ' (use forget to undo)\n') % m.rel(f))
2421 2421 ret = 1
2422 2422
2423 2423 for f in sorted(list):
2424 2424 if ui.verbose or not m.exact(f):
2425 2425 ui.status(_('removing %s\n') % m.rel(f))
2426 2426
2427 2427 wlock = repo.wlock()
2428 2428 try:
2429 2429 if not after:
2430 2430 for f in list:
2431 2431 if f in added:
2432 2432 continue # we never unlink added files on remove
2433 2433 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2434 2434 repo[None].forget(list)
2435 2435 finally:
2436 2436 wlock.release()
2437 2437
2438 2438 return ret
2439 2439
2440 2440 def cat(ui, repo, ctx, matcher, prefix, **opts):
2441 2441 err = 1
2442 2442
2443 2443 def write(path):
2444 2444 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2445 2445 pathname=os.path.join(prefix, path))
2446 2446 data = ctx[path].data()
2447 2447 if opts.get('decode'):
2448 2448 data = repo.wwritedata(path, data)
2449 2449 fp.write(data)
2450 2450 fp.close()
2451 2451
2452 2452 # Automation often uses hg cat on single files, so special case it
2453 2453 # for performance to avoid the cost of parsing the manifest.
2454 2454 if len(matcher.files()) == 1 and not matcher.anypats():
2455 2455 file = matcher.files()[0]
2456 2456 mf = repo.manifest
2457 2457 mfnode = ctx.manifestnode()
2458 2458 if mfnode and mf.find(mfnode, file)[0]:
2459 2459 write(file)
2460 2460 return 0
2461 2461
2462 2462 # Don't warn about "missing" files that are really in subrepos
2463 2463 def badfn(path, msg):
2464 2464 for subpath in ctx.substate:
2465 2465 if path.startswith(subpath):
2466 2466 return
2467 2467 matcher.bad(path, msg)
2468 2468
2469 2469 for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
2470 2470 write(abs)
2471 2471 err = 0
2472 2472
2473 2473 for subpath in sorted(ctx.substate):
2474 2474 sub = ctx.sub(subpath)
2475 2475 try:
2476 2476 submatch = matchmod.narrowmatcher(subpath, matcher)
2477 2477
2478 2478 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2479 2479 **opts):
2480 2480 err = 0
2481 2481 except error.RepoLookupError:
2482 2482 ui.status(_("skipping missing subrepository: %s\n")
2483 2483 % os.path.join(prefix, subpath))
2484 2484
2485 2485 return err
2486 2486
2487 2487 def commit(ui, repo, commitfunc, pats, opts):
2488 2488 '''commit the specified files or all outstanding changes'''
2489 2489 date = opts.get('date')
2490 2490 if date:
2491 2491 opts['date'] = util.parsedate(date)
2492 2492 message = logmessage(ui, opts)
2493 2493 matcher = scmutil.match(repo[None], pats, opts)
2494 2494
2495 2495 # extract addremove carefully -- this function can be called from a command
2496 2496 # that doesn't support addremove
2497 2497 if opts.get('addremove'):
2498 2498 if scmutil.addremove(repo, matcher, "", opts) != 0:
2499 2499 raise error.Abort(
2500 2500 _("failed to mark all new/missing files as added/removed"))
2501 2501
2502 2502 return commitfunc(ui, repo, message, matcher, opts)
2503 2503
2504 2504 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2505 2505 # avoid cycle context -> subrepo -> cmdutil
2506 2506 import context
2507 2507
2508 2508 # amend will reuse the existing user if not specified, but the obsolete
2509 2509 # marker creation requires that the current user's name is specified.
2510 2510 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2511 2511 ui.username() # raise exception if username not set
2512 2512
2513 2513 ui.note(_('amending changeset %s\n') % old)
2514 2514 base = old.p1()
2515 2515 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2516 2516
2517 2517 wlock = lock = newid = None
2518 2518 try:
2519 2519 wlock = repo.wlock()
2520 2520 lock = repo.lock()
2521 2521 tr = repo.transaction('amend')
2522 2522 try:
2523 2523 # See if we got a message from -m or -l; if not, open the editor
2524 2524 # with the message of the changeset to amend
2525 2525 message = logmessage(ui, opts)
2526 2526 # ensure logfile does not conflict with later enforcement of the
2527 2527 # message. potential logfile content has been processed by
2528 2528 # `logmessage` anyway.
2529 2529 opts.pop('logfile')
2530 2530 # First, do a regular commit to record all changes in the working
2531 2531 # directory (if there are any)
2532 2532 ui.callhooks = False
2533 2533 activebookmark = repo._activebookmark
2534 2534 try:
2535 2535 repo._activebookmark = None
2536 2536 opts['message'] = 'temporary amend commit for %s' % old
2537 2537 node = commit(ui, repo, commitfunc, pats, opts)
2538 2538 finally:
2539 2539 repo._activebookmark = activebookmark
2540 2540 ui.callhooks = True
2541 2541 ctx = repo[node]
2542 2542
2543 2543 # Participating changesets:
2544 2544 #
2545 2545 # node/ctx o - new (intermediate) commit that contains changes
2546 2546 # | from working dir to go into amending commit
2547 2547 # | (or a workingctx if there were no changes)
2548 2548 # |
2549 2549 # old o - changeset to amend
2550 2550 # |
2551 2551 # base o - parent of amending changeset
2552 2552
2553 2553 # Update extra dict from amended commit (e.g. to preserve graft
2554 2554 # source)
2555 2555 extra.update(old.extra())
2556 2556
2557 2557 # Also update it from the intermediate commit or from the wctx
2558 2558 extra.update(ctx.extra())
2559 2559
2560 2560 if len(old.parents()) > 1:
2561 2561 # ctx.files() isn't reliable for merges, so fall back to the
2562 2562 # slower repo.status() method
2563 2563 files = set([fn for st in repo.status(base, old)[:3]
2564 2564 for fn in st])
2565 2565 else:
2566 2566 files = set(old.files())
2567 2567
2568 2568 # Second, we use either the commit we just did or, if there were no
2569 2569 # changes, the parent of the working directory as the version of the
2570 2570 # files in the final amend commit
2571 2571 if node:
2572 2572 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2573 2573
2574 2574 user = ctx.user()
2575 2575 date = ctx.date()
2576 2576 # Recompute copies (avoid recording a -> b -> a)
2577 2577 copied = copies.pathcopies(base, ctx)
2578 2578 if old.p2().node() != nullid:
2579 2579 copied.update(copies.pathcopies(old.p2(), ctx))
2580 2580
2581 2581 # Prune files which were reverted by the updates: if old
2582 2582 # introduced file X and our intermediate commit, node,
2583 2583 # renamed that file, then those two files are the same and
2584 2584 # we can discard X from our list of files. Likewise if X
2585 2585 # was deleted, it's no longer relevant
2586 2586 files.update(ctx.files())
2587 2587
2588 2588 def samefile(f):
2589 2589 if f in ctx.manifest():
2590 2590 a = ctx.filectx(f)
2591 2591 if f in base.manifest():
2592 2592 b = base.filectx(f)
2593 2593 return (not a.cmp(b)
2594 2594 and a.flags() == b.flags())
2595 2595 else:
2596 2596 return False
2597 2597 else:
2598 2598 return f not in base.manifest()
2599 2599 files = [f for f in files if not samefile(f)]
2600 2600
2601 2601 def filectxfn(repo, ctx_, path):
2602 2602 try:
2603 2603 fctx = ctx[path]
2604 2604 flags = fctx.flags()
2605 2605 mctx = context.memfilectx(repo,
2606 2606 fctx.path(), fctx.data(),
2607 2607 islink='l' in flags,
2608 2608 isexec='x' in flags,
2609 2609 copied=copied.get(path))
2610 2610 return mctx
2611 2611 except KeyError:
2612 2612 return None
2613 2613 else:
2614 2614 ui.note(_('copying changeset %s to %s\n') % (old, base))
2615 2615
2616 2616 # Use version of files as in the old cset
2617 2617 def filectxfn(repo, ctx_, path):
2618 2618 try:
2619 2619 return old.filectx(path)
2620 2620 except KeyError:
2621 2621 return None
2622 2622
2623 2623 user = opts.get('user') or old.user()
2624 2624 date = opts.get('date') or old.date()
2625 2625 editform = mergeeditform(old, 'commit.amend')
2626 2626 editor = getcommiteditor(editform=editform, **opts)
2627 2627 if not message:
2628 2628 editor = getcommiteditor(edit=True, editform=editform)
2629 2629 message = old.description()
2630 2630
2631 2631 pureextra = extra.copy()
2632 2632 if 'amend_source' in pureextra:
2633 2633 del pureextra['amend_source']
2634 2634 pureoldextra = old.extra()
2635 2635 if 'amend_source' in pureoldextra:
2636 2636 del pureoldextra['amend_source']
2637 2637 extra['amend_source'] = old.hex()
2638 2638
2639 2639 new = context.memctx(repo,
2640 2640 parents=[base.node(), old.p2().node()],
2641 2641 text=message,
2642 2642 files=files,
2643 2643 filectxfn=filectxfn,
2644 2644 user=user,
2645 2645 date=date,
2646 2646 extra=extra,
2647 2647 editor=editor)
2648 2648
2649 2649 newdesc = changelog.stripdesc(new.description())
2650 2650 if ((not node)
2651 2651 and newdesc == old.description()
2652 2652 and user == old.user()
2653 2653 and date == old.date()
2654 2654 and pureextra == pureoldextra):
2655 2655 # nothing changed. continuing here would create a new node
2656 2656 # anyway because of the amend_source noise.
2657 2657 #
2658 2658 # This is not what we expect from amend.
2659 2659 return old.node()
2660 2660
2661 2661 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2662 2662 try:
2663 2663 if opts.get('secret'):
2664 2664 commitphase = 'secret'
2665 2665 else:
2666 2666 commitphase = old.phase()
2667 2667 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2668 2668 newid = repo.commitctx(new)
2669 2669 finally:
2670 2670 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2671 2671 if newid != old.node():
2672 2672 # Reroute the working copy parent to the new changeset
2673 2673 repo.setparents(newid, nullid)
2674 2674
2675 2675 # Move bookmarks from old parent to amend commit
2676 2676 bms = repo.nodebookmarks(old.node())
2677 2677 if bms:
2678 2678 marks = repo._bookmarks
2679 2679 for bm in bms:
2680 2680 ui.debug('moving bookmarks %r from %s to %s\n' %
2681 2681 (marks, old.hex(), hex(newid)))
2682 2682 marks[bm] = newid
2683 2683 marks.recordchange(tr)
2684 2684 # commit the whole amend process
2685 2685 if createmarkers:
2686 2686 # mark the new changeset as successor of the rewritten one
2687 2687 new = repo[newid]
2688 2688 obs = [(old, (new,))]
2689 2689 if node:
2690 2690 obs.append((ctx, ()))
2691 2691
2692 2692 obsolete.createmarkers(repo, obs)
2693 2693 tr.close()
2694 2694 finally:
2695 2695 tr.release()
2696 2696 if not createmarkers and newid != old.node():
2697 2697 # Strip the intermediate commit (if there was one) and the amended
2698 2698 # commit
2699 2699 if node:
2700 2700 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2701 2701 ui.note(_('stripping amended changeset %s\n') % old)
2702 2702 repair.strip(ui, repo, old.node(), topic='amend-backup')
2703 2703 finally:
2704 2704 lockmod.release(lock, wlock)
2705 2705 return newid
2706 2706
2707 2707 def commiteditor(repo, ctx, subs, editform=''):
2708 2708 if ctx.description():
2709 2709 return ctx.description()
2710 2710 return commitforceeditor(repo, ctx, subs, editform=editform,
2711 2711 unchangedmessagedetection=True)
2712 2712
2713 2713 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2714 2714 editform='', unchangedmessagedetection=False):
2715 2715 if not extramsg:
2716 2716 extramsg = _("Leave message empty to abort commit.")
2717 2717
2718 2718 forms = [e for e in editform.split('.') if e]
2719 2719 forms.insert(0, 'changeset')
2720 2720 templatetext = None
2721 2721 while forms:
2722 2722 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2723 2723 if tmpl:
2724 2724 templatetext = committext = buildcommittemplate(
2725 2725 repo, ctx, subs, extramsg, tmpl)
2726 2726 break
2727 2727 forms.pop()
2728 2728 else:
2729 2729 committext = buildcommittext(repo, ctx, subs, extramsg)
2730 2730
2731 2731 # run editor in the repository root
2732 2732 olddir = os.getcwd()
2733 2733 os.chdir(repo.root)
2734 2734
2735 2735 # make in-memory changes visible to external process
2736 2736 tr = repo.currenttransaction()
2737 2737 repo.dirstate.write(tr)
2738 2738 pending = tr and tr.writepending() and repo.root
2739 2739
2740 2740 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2741 2741 editform=editform, pending=pending)
2742 2742 text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
2743 2743 os.chdir(olddir)
2744 2744
2745 2745 if finishdesc:
2746 2746 text = finishdesc(text)
2747 2747 if not text.strip():
2748 2748 raise error.Abort(_("empty commit message"))
2749 2749 if unchangedmessagedetection and editortext == templatetext:
2750 2750 raise error.Abort(_("commit message unchanged"))
2751 2751
2752 2752 return text
2753 2753
2754 2754 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2755 2755 ui = repo.ui
2756 2756 tmpl, mapfile = gettemplate(ui, tmpl, None)
2757 2757
2758 2758 try:
2759 2759 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2760 2760 except SyntaxError as inst:
2761 2761 raise error.Abort(inst.args[0])
2762 2762
2763 2763 for k, v in repo.ui.configitems('committemplate'):
2764 2764 if k != 'changeset':
2765 2765 t.t.cache[k] = v
2766 2766
2767 2767 if not extramsg:
2768 2768 extramsg = '' # ensure that extramsg is string
2769 2769
2770 2770 ui.pushbuffer()
2771 2771 t.show(ctx, extramsg=extramsg)
2772 2772 return ui.popbuffer()
2773 2773
2774 2774 def hgprefix(msg):
2775 2775 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2776 2776
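# For example, hgprefix("user: alice\n") evaluates to "HG: user: alice":
# empty lines are dropped and every remaining line gains an "HG: " prefix.
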
2777 2777 def buildcommittext(repo, ctx, subs, extramsg):
2778 2778 edittext = []
2779 2779 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2780 2780 if ctx.description():
2781 2781 edittext.append(ctx.description())
2782 2782 edittext.append("")
2783 2783 edittext.append("") # Empty line between message and comments.
2784 2784 edittext.append(hgprefix(_("Enter commit message."
2785 2785 " Lines beginning with 'HG:' are removed.")))
2786 2786 edittext.append(hgprefix(extramsg))
2787 2787 edittext.append("HG: --")
2788 2788 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2789 2789 if ctx.p2():
2790 2790 edittext.append(hgprefix(_("branch merge")))
2791 2791 if ctx.branch():
2792 2792 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2793 2793 if bookmarks.isactivewdirparent(repo):
2794 2794 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2795 2795 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2796 2796 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2797 2797 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2798 2798 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2799 2799 if not added and not modified and not removed:
2800 2800 edittext.append(hgprefix(_("no files changed")))
2801 2801 edittext.append("")
2802 2802
2803 2803 return "\n".join(edittext)
2804 2804
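# A trimmed sketch of the text this builds for a plain commit with one
# modified file and no description (leading blank lines omitted); the user
# name, file name and extramsg line are placeholders chosen for illustration:
#
#   HG: Enter commit message. Lines beginning with 'HG:' are removed.
#   HG: Leave message empty to abort commit.
#   HG: --
#   HG: user: alice
#   HG: branch 'default'
#   HG: changed foo.txt
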
2805 2805 def commitstatus(repo, node, branch, bheads=None, opts=None):
2806 2806 if opts is None:
2807 2807 opts = {}
2808 2808 ctx = repo[node]
2809 2809 parents = ctx.parents()
2810 2810
2811 2811 if (not opts.get('amend') and bheads and node not in bheads and not
2812 2812 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2813 2813 repo.ui.status(_('created new head\n'))
2814 2814 # The message is not printed for initial roots. For the other
2815 2815 # changesets, it is printed in the following situations:
2816 2816 #
2817 2817 # Par column: for the 2 parents with ...
2818 2818 # N: null or no parent
2819 2819 # B: parent is on another named branch
2820 2820 # C: parent is a regular non head changeset
2821 2821 # H: parent was a branch head of the current branch
2822 2822 # Msg column: whether we print "created new head" message
2823 2823 # In the following, it is assumed that there already exists some
2824 2824 # initial branch heads of the current branch, otherwise nothing is
2825 2825 # printed anyway.
2826 2826 #
2827 2827 # Par Msg Comment
2828 2828 # N N y additional topo root
2829 2829 #
2830 2830 # B N y additional branch root
2831 2831 # C N y additional topo head
2832 2832 # H N n usual case
2833 2833 #
2834 2834 # B B y weird additional branch root
2835 2835 # C B y branch merge
2836 2836 # H B n merge with named branch
2837 2837 #
2838 2838 # C C y additional head from merge
2839 2839 # C H n merge with a head
2840 2840 #
2841 2841 # H H n head merge: head count decreases
2842 2842
2843 2843 if not opts.get('close_branch'):
2844 2844 for r in parents:
2845 2845 if r.closesbranch() and r.branch() == branch:
2846 2846 repo.ui.status(_('reopening closed branch head %d\n') % r)
2847 2847
2848 2848 if repo.ui.debugflag:
2849 2849 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2850 2850 elif repo.ui.verbose:
2851 2851 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2852 2852
2853 2853 def revert(ui, repo, ctx, parents, *pats, **opts):
2854 2854 parent, p2 = parents
2855 2855 node = ctx.node()
2856 2856
2857 2857 mf = ctx.manifest()
2858 2858 if node == p2:
2859 2859 parent = p2
2860 2860 if node == parent:
2861 2861 pmf = mf
2862 2862 else:
2863 2863 pmf = None
2864 2864
2865 2865 # need all matching names in dirstate and manifest of target rev,
2866 2866 # so have to walk both. do not print errors if files exist in one
2867 2867 # but not the other. in both cases, filesets should be evaluated against
2868 2868 # workingctx to get consistent result (issue4497). this means 'set:**'
2869 2869 # cannot be used to select missing files from target rev.
2870 2870
2871 2871 # `names` is a mapping for all elements in working copy and target revision
2872 2872 # The mapping is in the form:
2873 2873 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2874 2874 names = {}
2875 2875
2876 2876 wlock = repo.wlock()
2877 2877 try:
2878 2878 ## filling of the `names` mapping
2879 2879 # walk dirstate to fill `names`
2880 2880
2881 2881 interactive = opts.get('interactive', False)
2882 2882 wctx = repo[None]
2883 2883 m = scmutil.match(wctx, pats, opts)
2884 2884
2885 2885 # we'll need this later
2886 2886 targetsubs = sorted(s for s in wctx.substate if m(s))
2887 2887
2888 2888 if not m.always():
2889 2889 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2890 2890 names[abs] = m.rel(abs), m.exact(abs)
2891 2891
2892 2892 # walk target manifest to fill `names`
2893 2893
2894 2894 def badfn(path, msg):
2895 2895 if path in names:
2896 2896 return
2897 2897 if path in ctx.substate:
2898 2898 return
2899 2899 path_ = path + '/'
2900 2900 for f in names:
2901 2901 if f.startswith(path_):
2902 2902 return
2903 2903 ui.warn("%s: %s\n" % (m.rel(path), msg))
2904 2904
2905 2905 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2906 2906 if abs not in names:
2907 2907 names[abs] = m.rel(abs), m.exact(abs)
2908 2908
2909 2909 # Find status of all files in `names`.
2910 2910 m = scmutil.matchfiles(repo, names)
2911 2911
2912 2912 changes = repo.status(node1=node, match=m,
2913 2913 unknown=True, ignored=True, clean=True)
2914 2914 else:
2915 2915 changes = repo.status(node1=node, match=m)
2916 2916 for kind in changes:
2917 2917 for abs in kind:
2918 2918 names[abs] = m.rel(abs), m.exact(abs)
2919 2919
2920 2920 m = scmutil.matchfiles(repo, names)
2921 2921
2922 2922 modified = set(changes.modified)
2923 2923 added = set(changes.added)
2924 2924 removed = set(changes.removed)
2925 2925 _deleted = set(changes.deleted)
2926 2926 unknown = set(changes.unknown)
2927 2927 unknown.update(changes.ignored)
2928 2928 clean = set(changes.clean)
2929 2929 modadded = set()
2930 2930
2931 2931 # split between files known in target manifest and the others
2932 2932 smf = set(mf)
2933 2933
2934 2934 # determine the exact nature of the deleted files
2935 2935 deladded = _deleted - smf
2936 2936 deleted = _deleted - deladded
2937 2937
2938 2938 # We need to account for the state of the file in the dirstate,
2939 2939 # even when we revert against something other than the parent. This will
2940 2940 # slightly alter the behavior of revert (doing a backup or not, delete
2941 2941 # or just forget etc).
2942 2942 if parent == node:
2943 2943 dsmodified = modified
2944 2944 dsadded = added
2945 2945 dsremoved = removed
2946 2946 # store all local modifications, useful later for rename detection
2947 2947 localchanges = dsmodified | dsadded
2948 2948 modified, added, removed = set(), set(), set()
2949 2949 else:
2950 2950 changes = repo.status(node1=parent, match=m)
2951 2951 dsmodified = set(changes.modified)
2952 2952 dsadded = set(changes.added)
2953 2953 dsremoved = set(changes.removed)
2954 2954 # store all local modifications, useful later for rename detection
2955 2955 localchanges = dsmodified | dsadded
2956 2956
2957 2957 # only take removes between wc and target into account
2958 2958 clean |= dsremoved - removed
2959 2959 dsremoved &= removed
2960 2960 # distinguish between dirstate removes and others
2961 2961 removed -= dsremoved
2962 2962
2963 2963 modadded = added & dsmodified
2964 2964 added -= modadded
2965 2965
2966 2966 # tell newly modified files apart.
2967 2967 dsmodified &= modified
2968 2968 dsmodified |= modified & dsadded # dirstate-added files may need backup
2969 2969 modified -= dsmodified
2970 2970
2971 2971 # We need to wait for some post-processing to update this set
2972 2972 # before making the distinction. The dirstate will be used for
2973 2973 # that purpose.
2974 2974 dsadded = added
2975 2975
2976 2976 # in case of merge, files that are actually added can be reported as
2977 2977 # modified, so we need to post-process the result
2978 2978 if p2 != nullid:
2979 2979 if pmf is None:
2980 2980 # only need parent manifest in the merge case,
2981 2981 # so do not read by default
2982 2982 pmf = repo[parent].manifest()
2983 2983 mergeadd = dsmodified - set(pmf)
2984 2984 dsadded |= mergeadd
2985 2985 dsmodified -= mergeadd
2986 2986
2987 2987 # if f is a rename, update `names` to also revert the source
2988 2988 cwd = repo.getcwd()
2989 2989 for f in localchanges:
2990 2990 src = repo.dirstate.copied(f)
2991 2991 # XXX should we check for rename down to target node?
2992 2992 if src and src not in names and repo.dirstate[src] == 'r':
2993 2993 dsremoved.add(src)
2994 2994 names[src] = (repo.pathto(src, cwd), True)
2995 2995
2996 2996 # distinguish between files to forget and the others
2997 2997 added = set()
2998 2998 for abs in dsadded:
2999 2999 if repo.dirstate[abs] != 'a':
3000 3000 added.add(abs)
3001 3001 dsadded -= added
3002 3002
3003 3003 for abs in deladded:
3004 3004 if repo.dirstate[abs] == 'a':
3005 3005 dsadded.add(abs)
3006 3006 deladded -= dsadded
3007 3007
3008 3008 # For files marked as removed, we check if an unknown file is present at
3009 3009 # the same path. If such a file exists it may need to be backed up.
3010 3010 # Making the distinction at this stage helps have simpler backup
3011 3011 # logic.
3012 3012 removunk = set()
3013 3013 for abs in removed:
3014 3014 target = repo.wjoin(abs)
3015 3015 if os.path.lexists(target):
3016 3016 removunk.add(abs)
3017 3017 removed -= removunk
3018 3018
3019 3019 dsremovunk = set()
3020 3020 for abs in dsremoved:
3021 3021 target = repo.wjoin(abs)
3022 3022 if os.path.lexists(target):
3023 3023 dsremovunk.add(abs)
3024 3024 dsremoved -= dsremovunk
3025 3025
3026 3026 # action to be actually performed by revert
3027 3027 # (<list of files>, <message>) tuple
3028 3028 actions = {'revert': ([], _('reverting %s\n')),
3029 3029 'add': ([], _('adding %s\n')),
3030 3030 'remove': ([], _('removing %s\n')),
3031 3031 'drop': ([], _('removing %s\n')),
3032 3032 'forget': ([], _('forgetting %s\n')),
3033 3033 'undelete': ([], _('undeleting %s\n')),
3034 3034 'noop': (None, _('no changes needed to %s\n')),
3035 3035 'unknown': (None, _('file not managed: %s\n')),
3036 3036 }
3037 3037
3038 3038 # "constant" that convey the backup strategy.
3039 3039 # All set to `discard` if `no-backup` is set do avoid checking
3040 3040 # no_backup lower in the code.
3041 3041 # These values are ordered for comparison purposes
3042 3042 backup = 2 # unconditionally do backup
3043 3043 check = 1 # check if the existing file differs from target
3044 3044 discard = 0 # never do backup
3045 3045 if opts.get('no_backup'):
3046 3046 backup = check = discard
3047 3047
3048 3048 backupanddel = actions['remove']
3049 3049 if not opts.get('no_backup'):
3050 3050 backupanddel = actions['drop']
3051 3051
3052 3052 disptable = (
3053 3053 # dispatch table:
3054 3054 # file state
3055 3055 # action
3056 3056 # make backup
3057 3057
3058 3058 ## Sets that result in changes to files on disk
3059 3059 # Modified compared to target, no local change
3060 3060 (modified, actions['revert'], discard),
3061 3061 # Modified compared to target, but local file is deleted
3062 3062 (deleted, actions['revert'], discard),
3063 3063 # Modified compared to target, local change
3064 3064 (dsmodified, actions['revert'], backup),
3065 3065 # Added since target
3066 3066 (added, actions['remove'], discard),
3067 3067 # Added in working directory
3068 3068 (dsadded, actions['forget'], discard),
3069 3069 # Added since target, have local modification
3070 3070 (modadded, backupanddel, backup),
3071 3071 # Added since target but file is missing in working directory
3072 3072 (deladded, actions['drop'], discard),
3073 3073 # Removed since target, before working copy parent
3074 3074 (removed, actions['add'], discard),
3075 3075 # Same as `removed` but an unknown file exists at the same path
3076 3076 (removunk, actions['add'], check),
3077 3077 # Removed since target, marked as such in working copy parent
3078 3078 (dsremoved, actions['undelete'], discard),
3079 3079 # Same as `dsremoved` but an unknown file exists at the same path
3080 3080 (dsremovunk, actions['undelete'], check),
3081 3081 ## the following sets do not result in any file changes
3082 3082 # File with no modification
3083 3083 (clean, actions['noop'], discard),
3084 3084 # Existing file, not tracked anywhere
3085 3085 (unknown, actions['unknown'], discard),
3086 3086 )
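# Worked example (annotation, not in the original): a file that is only
# modified in the working directory ends up in `dsmodified`, so the loop
# below appends it to actions['revert'] with dobackup == backup, i.e.
# (unless --no-backup was given) its current content is first saved to an
# origpath() backup and the file is then reverted to the target revision.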
3087 3087
3088 3088 for abs, (rel, exact) in sorted(names.items()):
3089 3089 # target file to be touched on disk (relative to cwd)
3090 3090 target = repo.wjoin(abs)
3091 3091 # search for the entry in the dispatch table.
3092 3092 # if the file is in any of these sets, it was touched in the working
3093 3093 # directory parent and we are sure it needs to be reverted.
3094 3094 for table, (xlist, msg), dobackup in disptable:
3095 3095 if abs not in table:
3096 3096 continue
3097 3097 if xlist is not None:
3098 3098 xlist.append(abs)
3099 3099 if dobackup and (backup <= dobackup
3100 3100 or wctx[abs].cmp(ctx[abs])):
3101 3101 bakname = origpath(ui, repo, rel)
3102 3102 ui.note(_('saving current version of %s as %s\n') %
3103 3103 (rel, bakname))
3104 3104 if not opts.get('dry_run'):
3105 3105 if interactive:
3106 3106 util.copyfile(target, bakname)
3107 3107 else:
3108 3108 util.rename(target, bakname)
3109 3109 if ui.verbose or not exact:
3110 3110 if not isinstance(msg, basestring):
3111 3111 msg = msg(abs)
3112 3112 ui.status(msg % rel)
3113 3113 elif exact:
3114 3114 ui.warn(msg % rel)
3115 3115 break
3116 3116
3117 3117 if not opts.get('dry_run'):
3118 3118 needdata = ('revert', 'add', 'undelete')
3119 3119 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3120 3120 _performrevert(repo, parents, ctx, actions, interactive)
3121 3121
3122 3122 if targetsubs:
3123 3123 # Revert the subrepos on the revert list
3124 3124 for sub in targetsubs:
3125 3125 try:
3126 3126 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3127 3127 except KeyError:
3128 3128 raise error.Abort("subrepository '%s' does not exist in %s!"
3129 3129 % (sub, short(ctx.node())))
3130 3130 finally:
3131 3131 wlock.release()
3132 3132
3133 3133 def origpath(ui, repo, filepath):
3134 3134 '''customize where .orig files are created
3135 3135
3136 3136 Fetch the user-defined path from the config file: [ui] origbackuppath = <path>
3137 3137 Fall back to the default (filepath + ".orig") if not specified
3138 3138 '''
3139 3139 origbackuppath = ui.config('ui', 'origbackuppath', None)
3140 3140 if origbackuppath is None:
3141 3141 return filepath + ".orig"
3142 3142
3143 3143 filepathfromroot = os.path.relpath(filepath, start=repo.root)
3144 3144 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
3145 3145
3146 3146 origbackupdir = repo.vfs.dirname(fullorigpath)
3147 3147 if not repo.vfs.exists(origbackupdir):
3148 3148 ui.note(_('creating directory: %s\n') % origbackupdir)
3149 3149 util.makedirs(origbackupdir)
3150 3150
3151 3151 return fullorigpath + ".orig"
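# Illustrative configuration (annotation, not part of this module): with an
# hgrc section like
#
#     [ui]
#     origbackuppath = .hg/origbackups
#
# origpath() saves the previous content of e.g. 'src/foo.c' under
# '.hg/origbackups/src/foo.c.orig' instead of next to the file as
# 'src/foo.c.orig'.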
3152 3152
3153 3153 def _revertprefetch(repo, ctx, *files):
3154 3154 """Let extensions changing the storage layer prefetch content"""
3155 3155 pass
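# Sketch of how an extension might use this hook (hypothetical extension
# code, shown here only as annotation): a storage extension that keeps file
# contents remotely could wrap the function so data is fetched in bulk
# before revert starts writing files:
#
#     from mercurial import extensions, cmdutil
#
#     def prefetchingrevert(orig, repo, ctx, *files):
#         downloadfilecontents(repo, ctx, files)  # hypothetical helper
#         return orig(repo, ctx, *files)
#
#     def uisetup(ui):
#         extensions.wrapfunction(cmdutil, '_revertprefetch', prefetchingrevert)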
3156 3156
3157 3157 def _performrevert(repo, parents, ctx, actions, interactive=False):
3158 3158 """function that actually performs all the actions computed for revert
3159 3159
3160 3160 This is an independent function to let extensions plug in and react to
3161 3161 the imminent revert.
3162 3162
3163 3163 Make sure you have the working directory locked when calling this function.
3164 3164 """
3165 3165 parent, p2 = parents
3166 3166 node = ctx.node()
3167 3167 def checkout(f):
3168 3168 fc = ctx[f]
3169 3169 repo.wwrite(f, fc.data(), fc.flags())
3170 3170
3171 3171 audit_path = pathutil.pathauditor(repo.root)
3172 3172 for f in actions['forget'][0]:
3173 3173 repo.dirstate.drop(f)
3174 3174 for f in actions['remove'][0]:
3175 3175 audit_path(f)
3176 3176 try:
3177 3177 util.unlinkpath(repo.wjoin(f))
3178 3178 except OSError:
3179 3179 pass
3180 3180 repo.dirstate.remove(f)
3181 3181 for f in actions['drop'][0]:
3182 3182 audit_path(f)
3183 3183 repo.dirstate.remove(f)
3184 3184
3185 3185 normal = None
3186 3186 if node == parent:
3187 3187 # We're reverting to our parent. If possible, we'd like status
3188 3188 # to report the file as clean. We have to use normallookup for
3189 3189 # merges to avoid losing information about merged/dirty files.
3190 3190 if p2 != nullid:
3191 3191 normal = repo.dirstate.normallookup
3192 3192 else:
3193 3193 normal = repo.dirstate.normal
3194 3194
3195 3195 newlyaddedandmodifiedfiles = set()
3196 3196 if interactive:
3197 3197 # Prompt the user for changes to revert
3198 3198 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3199 3199 m = scmutil.match(ctx, torevert, {})
3200 3200 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3201 3201 diffopts.nodates = True
3202 3202 diffopts.git = True
3203 3203 reversehunks = repo.ui.configbool('experimental',
3204 3204 'revertalternateinteractivemode',
3205 3205 True)
3206 3206 if reversehunks:
3207 3207 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3208 3208 else:
3209 3209 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3210 3210 originalchunks = patch.parsepatch(diff)
3211 3211
3212 3212 try:
3213 3213
3214 3214 chunks, opts = recordfilter(repo.ui, originalchunks)
3215 3215 if reversehunks:
3216 3216 chunks = patch.reversehunks(chunks)
3217 3217
3218 3218 except patch.PatchError as err:
3219 3219 raise error.Abort(_('error parsing patch: %s') % err)
3220 3220
3221 3221 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3222 3222 # Apply changes
3223 3223 fp = cStringIO.StringIO()
3224 3224 for c in chunks:
3225 3225 c.write(fp)
3226 3226 dopatch = fp.tell()
3227 3227 fp.seek(0)
3228 3228 if dopatch:
3229 3229 try:
3230 3230 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3231 3231 except patch.PatchError as err:
3232 3232 raise error.Abort(str(err))
3233 3233 del fp
3234 3234 else:
3235 3235 for f in actions['revert'][0]:
3236 3236 checkout(f)
3237 3237 if normal:
3238 3238 normal(f)
3239 3239
3240 3240 for f in actions['add'][0]:
3241 3241 # Don't check out modified files; they are already created by the diff
3242 3242 if f not in newlyaddedandmodifiedfiles:
3243 3243 checkout(f)
3244 3244 repo.dirstate.add(f)
3245 3245
3246 3246 normal = repo.dirstate.normallookup
3247 3247 if node == parent and p2 == nullid:
3248 3248 normal = repo.dirstate.normal
3249 3249 for f in actions['undelete'][0]:
3250 3250 checkout(f)
3251 3251 normal(f)
3252 3252
3253 3253 copied = copies.pathcopies(repo[parent], ctx)
3254 3254
3255 3255 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3256 3256 if f in copied:
3257 3257 repo.dirstate.copy(copied[f], f)
3258 3258
3259 3259 def command(table):
3260 3260 """Returns a function object to be used as a decorator for making commands.
3261 3261
3262 3262 This function receives a command table as its argument. The table should
3263 3263 be a dict.
3264 3264
3265 3265 The returned function can be used as a decorator for adding commands
3266 3266 to that command table. This function accepts multiple arguments to define
3267 3267 a command.
3268 3268
3269 3269 The first argument is the command name.
3270 3270
3271 3271 The options argument is an iterable of tuples defining command arguments.
3272 3272 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3273 3273
3274 3274 The synopsis argument defines a short, one line summary of how to use the
3275 3275 command. This shows up in the help output.
3276 3276
3277 3277 The norepo argument defines whether the command does not require a
3278 3278 local repository. Most commands operate against a repository, thus the
3279 3279 default is False.
3280 3280
3281 3281 The optionalrepo argument defines whether the command optionally requires
3282 3282 a local repository.
3283 3283
3284 3284 The inferrepo argument defines whether to try to find a repository from the
3285 3285 command line arguments. If True, arguments will be examined for potential
3286 3286 repository locations. See ``findrepo()``. If a repository is found, it
3287 3287 will be used.
3288 3288 """
3289 3289 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3290 3290 inferrepo=False):
3291 3291 def decorator(func):
3292 3292 if synopsis:
3293 3293 table[name] = func, list(options), synopsis
3294 3294 else:
3295 3295 table[name] = func, list(options)
3296 3296
3297 3297 if norepo:
3298 3298 # Avoid import cycle.
3299 3299 import commands
3300 3300 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3301 3301
3302 3302 if optionalrepo:
3303 3303 import commands
3304 3304 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3305 3305
3306 3306 if inferrepo:
3307 3307 import commands
3308 3308 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3309 3309
3310 3310 return func
3311 3311 return decorator
3312 3312
3313 3313 return cmd
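# Usage sketch (annotation, not part of cmdutil): a third-party extension
# typically builds its command table with this factory, e.g.
#
#     cmdtable = {}
#     command = cmdutil.command(cmdtable)
#
#     @command('hello', [('g', 'greeting', 'Hello', _('greeting text'))],
#              _('hg hello [-g TEXT]'), norepo=True)
#     def hello(ui, **opts):
#         ui.write('%s, world\n' % opts['greeting'])
#
# which registers the function under 'hello' in cmdtable and marks it as not
# requiring a repository.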
3314 3314
3315 3315 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3316 3316 # commands.outgoing. "missing" is the "missing" attribute of the result
3317 3317 # of "findcommonoutgoing()"
3318 3318 outgoinghooks = util.hooks()
3319 3319
3320 3320 # a list of (ui, repo) functions called by commands.summary
3321 3321 summaryhooks = util.hooks()
3322 3322
3323 3323 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3324 3324 #
3325 3325 # functions should return a tuple of booleans below, if 'changes' is None:
3326 3326 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3327 3327 #
3328 3328 # otherwise, 'changes' is a tuple of tuples below:
3329 3329 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3330 3330 # - (desturl, destbranch, destpeer, outgoing)
3331 3331 summaryremotehooks = util.hooks()
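# Registration sketch (annotation, not in the original): extensions add
# callbacks to these hook lists with util.hooks.add(), e.g.
#
#     def showmystate(ui, repo):
#         ui.write(_('mystate: ...\n'))  # hypothetical summary line
#
#     cmdutil.summaryhooks.add('myextension', showmystate)
#
# and 'hg summary' then invokes the callback after its built-in output.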
3332 3332
3333 3333 # A list of state files kept by multistep operations like graft.
3334 3334 # Since graft cannot be aborted, it is considered 'clearable' by update.
3335 3335 # note: bisect is intentionally excluded
3336 3336 # (state file, clearable, allowcommit, error, hint)
3337 3337 unfinishedstates = [
3338 3338 ('graftstate', True, False, _('graft in progress'),
3339 3339 _("use 'hg graft --continue' or 'hg update' to abort")),
3340 3340 ('updatestate', True, False, _('last update was interrupted'),
3341 3341 _("use 'hg update' to get a consistent checkout"))
3342 3342 ]
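# Extension sketch (annotation, not in the original): extensions that add
# their own multistep commands append an entry here, for example an
# extension providing a hypothetical 'mycommand' operation might do
#
#     cmdutil.unfinishedstates.append(
#         ['mystate', False, False, _('my operation in progress'),
#          _("use 'hg mycommand --continue' or 'hg mycommand --abort'")])
#
# so that checkunfinished()/clearunfinished() below know about the new
# state file.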
3343 3343
3344 3344 def checkunfinished(repo, commit=False):
3345 3345 '''Look for an unfinished multistep operation, like graft, and abort
3346 3346 if found. It's probably good to check this right before
3347 3347 bailifchanged().
3348 3348 '''
3349 3349 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3350 3350 if commit and allowcommit:
3351 3351 continue
3352 3352 if repo.vfs.exists(f):
3353 3353 raise error.Abort(msg, hint=hint)
3354 3354
3355 3355 def clearunfinished(repo):
3356 3356 '''Check for unfinished operations (as above), and clear the ones
3357 3357 that are clearable.
3358 3358 '''
3359 3359 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3360 3360 if not clearable and repo.vfs.exists(f):
3361 3361 raise error.Abort(msg, hint=hint)
3362 3362 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3363 3363 if clearable and repo.vfs.exists(f):
3364 3364 util.unlink(repo.join(f))
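# Typical call pattern (annotation, not in the original): commands that start
# a new multistep operation usually run
#
#     cmdutil.checkunfinished(repo)
#     cmdutil.bailifchanged(repo)
#
# near their beginning, while 'hg update' calls clearunfinished() so that
# clearable state files (see the table above) do not block the update.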
3365 3365
3366 3366 class dirstateguard(object):
3367 3367 '''Restore dirstate at unexpected failure.
3368 3368
3369 3369 At construction, this class does:
3370 3370 
3371 3371 - write the current ``repo.dirstate`` out, and
3372 3372 - save ``.hg/dirstate`` into a backup file
3373 3373 
3374 3374 ``.hg/dirstate`` is restored from the backup file if ``release()``
3375 3375 is invoked before ``close()``.
3376 3376 
3377 3377 If ``close()`` is invoked first, this just removes the backup file.
3378 3378 '''
3379 3379
3380 3380 def __init__(self, repo, name):
3381 3381 self._repo = repo
3382 3382 self._suffix = '.backup.%s.%d' % (name, id(self))
3383 3383 repo.dirstate._savebackup(repo.currenttransaction(), self._suffix)
3384 3384 self._active = True
3385 3385 self._closed = False
3386 3386
3387 3387 def __del__(self):
3388 3388 if self._active: # still active
3389 3389 # this may occur, even if this class is used correctly:
3390 3390 # for example, releasing other resources like a transaction
3391 3391 # may raise an exception before ``dirstateguard.release`` in
3392 3392 # ``release(tr, ....)``.
3393 3393 self._abort()
3394 3394
3395 3395 def close(self):
3396 3396 if not self._active: # already inactivated
3397 3397 msg = (_("can't close already inactivated backup: dirstate%s")
3398 3398 % self._suffix)
3399 3399 raise error.Abort(msg)
3400 3400
3401 3401 self._repo.dirstate._clearbackup(self._repo.currenttransaction(),
3402 3402 self._suffix)
3403 3403 self._active = False
3404 3404 self._closed = True
3405 3405
3406 3406 def _abort(self):
3407 3407 self._repo.dirstate._restorebackup(self._repo.currenttransaction(),
3408 3408 self._suffix)
3409 3409 self._active = False
3410 3410
3411 3411 def release(self):
3412 3412 if not self._closed:
3413 3413 if not self._active: # already inactivated
3414 3414 msg = (_("can't release already inactivated backup:"
3415 3415 " dirstate%s")
3416 3416 % self._suffix)
3417 3417 raise error.Abort(msg)
3418 3418 self._abort()
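# Usage sketch (annotation, not part of this module; 'mycommand' is just a
# placeholder name): callers pair close() with release(), mirroring how
# transactions are used:
#
#     dsguard = dirstateguard(repo, 'mycommand')
#     try:
#         # ... mutate repo.dirstate ...
#         dsguard.close()    # success: the backup is discarded
#     finally:
#         dsguard.release()  # if close() was not reached, restore the backup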