cmdutil: use crecordmod.checkcurses...
Sean Farley
r27531:84d686cb stable
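This revision changes recordfilter() to ask crecordmod.checkcurses(ui) whether the curses chunk selector should be used, instead of reading the experimental.crecord config knob directly; judging by the name, checkcurses is expected to consult that setting and also verify that a curses interface is actually usable. The one-line change at line 69 of cmdutil.py:

    -    usecurses = ui.configbool('experimental', 'crecord', False)
    +    usecurses = crecordmod.checkcurses(ui)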
@@ -1,3378 +1,3378 @@
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import formatter
18 18 import crecord as crecordmod
19 19 import lock as lockmod
20 20
21 21 def ishunk(x):
22 22 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
23 23 return isinstance(x, hunkclasses)
24 24
25 25 def newandmodified(chunks, originalchunks):
26 26 newlyaddedandmodifiedfiles = set()
27 27 for chunk in chunks:
28 28 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
29 29 originalchunks:
30 30 newlyaddedandmodifiedfiles.add(chunk.header.filename())
31 31 return newlyaddedandmodifiedfiles
32 32
33 33 def parsealiases(cmd):
34 34 return cmd.lstrip("^").split("|")
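# For example (command-table keys below are hypothetical): parsealiases strips
# the leading "^" marker and splits the remaining string on "|":
#   parsealiases("^log|history")   ->  ['log', 'history']
#   parsealiases("annotate|blame") ->  ['annotate', 'blame']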
35 35
36 36 def setupwrapcolorwrite(ui):
37 37 # wrap ui.write so diff output can be labeled/colorized
38 38 def wrapwrite(orig, *args, **kw):
39 39 label = kw.pop('label', '')
40 40 for chunk, l in patch.difflabel(lambda: args):
41 41 orig(chunk, label=label + l)
42 42
43 43 oldwrite = ui.write
44 44 def wrap(*args, **kwargs):
45 45 return wrapwrite(oldwrite, *args, **kwargs)
46 46 setattr(ui, 'write', wrap)
47 47 return oldwrite
48 48
49 49 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
50 50 if usecurses:
51 51 if testfile:
52 52 recordfn = crecordmod.testdecorator(testfile,
53 53 crecordmod.testchunkselector)
54 54 else:
55 55 recordfn = crecordmod.chunkselector
56 56
57 57 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
58 58
59 59 else:
60 60 return patch.filterpatch(ui, originalhunks, operation)
61 61
62 62 def recordfilter(ui, originalhunks, operation=None):
63 63 """ Prompts the user to filter the originalhunks and returns a list of
64 64 selected hunks.
65 65 *operation* is used for ui purposes to indicate to the user
66 66 what kind of filtering they are doing: reverting, committing, shelving, etc.
67 67 *operation* has to be a translated string.
68 68 """
69 usecurses = ui.configbool('experimental', 'crecord', False)
69 usecurses = crecordmod.checkcurses(ui)
70 70 testfile = ui.config('experimental', 'crecordtest', None)
71 71 oldwrite = setupwrapcolorwrite(ui)
72 72 try:
73 73 newchunks = filterchunks(ui, originalhunks, usecurses, testfile,
74 74 operation)
75 75 finally:
76 76 ui.write = oldwrite
77 77 return newchunks
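# A minimal usage sketch (names such as 'diffoutput' are hypothetical; a ui
# object and a parsed git-style diff are assumed):
#   originalhunks = patch.parsepatch(diffoutput)
#   chunks = recordfilter(ui, originalhunks, operation=_('commit'))
# When crecordmod.checkcurses(ui) reports that the curses interface can be
# used, the curses chunk selector handles the prompting; otherwise the plain
# patch.filterpatch() prompter is used, as filterchunks() above shows.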
78 78
79 79 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
80 80 filterfn, *pats, **opts):
81 81 import merge as mergemod
82 82
83 83 if not ui.interactive():
84 84 if cmdsuggest:
85 85 msg = _('running non-interactively, use %s instead') % cmdsuggest
86 86 else:
87 87 msg = _('running non-interactively')
88 88 raise error.Abort(msg)
89 89
90 90 # make sure username is set before going interactive
91 91 if not opts.get('user'):
92 92 ui.username() # raise exception, username not provided
93 93
94 94 def recordfunc(ui, repo, message, match, opts):
95 95 """This is the generic record driver.
96 96
97 97 Its job is to interactively filter local changes, and
98 98 accordingly prepare the working directory into a state in which the
99 99 job can be delegated to a non-interactive commit command such as
100 100 'commit' or 'qrefresh'.
101 101
102 102 After the actual job is done by the non-interactive command, the
103 103 working directory is restored to its original state.
104 104
105 105 In the end we'll record interesting changes, and everything else
106 106 will be left in place, so the user can continue working.
107 107 """
108 108
109 109 checkunfinished(repo, commit=True)
110 110 merge = len(repo[None].parents()) > 1
111 111 if merge:
112 112 raise error.Abort(_('cannot partially commit a merge '
113 113 '(use "hg commit" instead)'))
114 114
115 115 status = repo.status(match=match)
116 116 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
117 117 diffopts.nodates = True
118 118 diffopts.git = True
119 119 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
120 120 originalchunks = patch.parsepatch(originaldiff)
121 121
122 122 # 1. filter patch, so we have the intending-to-apply subset of it
123 123 try:
124 124 chunks = filterfn(ui, originalchunks)
125 125 except patch.PatchError as err:
126 126 raise error.Abort(_('error parsing patch: %s') % err)
127 127
128 128 # We need to keep a backup of files that have been newly added and
129 129 # modified during the recording process because there is a previous
130 130 # version without the edit in the workdir
131 131 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
132 132 contenders = set()
133 133 for h in chunks:
134 134 try:
135 135 contenders.update(set(h.files()))
136 136 except AttributeError:
137 137 pass
138 138
139 139 changed = status.modified + status.added + status.removed
140 140 newfiles = [f for f in changed if f in contenders]
141 141 if not newfiles:
142 142 ui.status(_('no changes to record\n'))
143 143 return 0
144 144
145 145 modified = set(status.modified)
146 146
147 147 # 2. backup changed files, so we can restore them in the end
148 148
149 149 if backupall:
150 150 tobackup = changed
151 151 else:
152 152 tobackup = [f for f in newfiles if f in modified or f in \
153 153 newlyaddedandmodifiedfiles]
154 154 backups = {}
155 155 if tobackup:
156 156 backupdir = repo.join('record-backups')
157 157 try:
158 158 os.mkdir(backupdir)
159 159 except OSError as err:
160 160 if err.errno != errno.EEXIST:
161 161 raise
162 162 try:
163 163 # backup continues
164 164 for f in tobackup:
165 165 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
166 166 dir=backupdir)
167 167 os.close(fd)
168 168 ui.debug('backup %r as %r\n' % (f, tmpname))
169 169 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
170 170 backups[f] = tmpname
171 171
172 172 fp = cStringIO.StringIO()
173 173 for c in chunks:
174 174 fname = c.filename()
175 175 if fname in backups:
176 176 c.write(fp)
177 177 dopatch = fp.tell()
178 178 fp.seek(0)
179 179
180 180 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
181 181 # 3a. apply filtered patch to clean repo (clean)
182 182 if backups:
183 183 # Equivalent to hg.revert
184 184 choices = lambda key: key in backups
185 185 mergemod.update(repo, repo.dirstate.p1(),
186 186 False, True, choices)
187 187
188 188 # 3b. (apply)
189 189 if dopatch:
190 190 try:
191 191 ui.debug('applying patch\n')
192 192 ui.debug(fp.getvalue())
193 193 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
194 194 except patch.PatchError as err:
195 195 raise error.Abort(str(err))
196 196 del fp
197 197
198 198 # 4. We prepared working directory according to filtered
199 199 # patch. Now is the time to delegate the job to
200 200 # commit/qrefresh or the like!
201 201
202 202 # Make all of the pathnames absolute.
203 203 newfiles = [repo.wjoin(nf) for nf in newfiles]
204 204 return commitfunc(ui, repo, *newfiles, **opts)
205 205 finally:
206 206 # 5. finally restore backed-up files
207 207 try:
208 208 dirstate = repo.dirstate
209 209 for realname, tmpname in backups.iteritems():
210 210 ui.debug('restoring %r to %r\n' % (tmpname, realname))
211 211
212 212 if dirstate[realname] == 'n':
213 213 # without normallookup, restoring timestamp
214 214 # may cause partially committed files
215 215 # to be treated as unmodified
216 216 dirstate.normallookup(realname)
217 217
218 218 # copystat=True here and above are a hack to trick any
219 219 # editors that have f open into thinking we haven't modified it.
220 220 #
221 221 # Also note that this is racy, as an editor could notice the
222 222 # file's mtime before we've finished writing it.
223 223 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
224 224 os.unlink(tmpname)
225 225 if tobackup:
226 226 os.rmdir(backupdir)
227 227 except OSError:
228 228 pass
229 229
230 230 def recordinwlock(ui, repo, message, match, opts):
231 231 wlock = repo.wlock()
232 232 try:
233 233 return recordfunc(ui, repo, message, match, opts)
234 234 finally:
235 235 wlock.release()
236 236
237 237 return commit(ui, repo, recordinwlock, pats, opts)
238 238
239 239 def findpossible(cmd, table, strict=False):
240 240 """
241 241 Return cmd -> (aliases, command table entry)
242 242 for each matching command.
243 243 Return debug commands (or their aliases) only if no normal command matches.
244 244 """
245 245 choice = {}
246 246 debugchoice = {}
247 247
248 248 if cmd in table:
249 249 # short-circuit exact matches, "log" alias beats "^log|history"
250 250 keys = [cmd]
251 251 else:
252 252 keys = table.keys()
253 253
254 254 allcmds = []
255 255 for e in keys:
256 256 aliases = parsealiases(e)
257 257 allcmds.extend(aliases)
258 258 found = None
259 259 if cmd in aliases:
260 260 found = cmd
261 261 elif not strict:
262 262 for a in aliases:
263 263 if a.startswith(cmd):
264 264 found = a
265 265 break
266 266 if found is not None:
267 267 if aliases[0].startswith("debug") or found.startswith("debug"):
268 268 debugchoice[found] = (aliases, table[e])
269 269 else:
270 270 choice[found] = (aliases, table[e])
271 271
272 272 if not choice and debugchoice:
273 273 choice = debugchoice
274 274
275 275 return choice, allcmds
276 276
277 277 def findcmd(cmd, table, strict=True):
278 278 """Return (aliases, command table entry) for command string."""
279 279 choice, allcmds = findpossible(cmd, table, strict)
280 280
281 281 if cmd in choice:
282 282 return choice[cmd]
283 283
284 284 if len(choice) > 1:
285 285 clist = choice.keys()
286 286 clist.sort()
287 287 raise error.AmbiguousCommand(cmd, clist)
288 288
289 289 if choice:
290 290 return choice.values()[0]
291 291
292 292 raise error.UnknownCommand(cmd, allcmds)
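# Resolution sketch (the command table below is hypothetical): with an entry
# keyed "^log|history",
#   findcmd('history', table)            ->  (['log', 'history'], entry)
#   findcmd('his', table, strict=False)  ->  the same entry, via prefix matching
# An ambiguous prefix raises error.AmbiguousCommand; an unknown name raises
# error.UnknownCommand together with the list of all known aliases.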
293 293
294 294 def findrepo(p):
295 295 while not os.path.isdir(os.path.join(p, ".hg")):
296 296 oldp, p = p, os.path.dirname(p)
297 297 if p == oldp:
298 298 return None
299 299
300 300 return p
301 301
302 302 def bailifchanged(repo, merge=True):
303 303 if merge and repo.dirstate.p2() != nullid:
304 304 raise error.Abort(_('outstanding uncommitted merge'))
305 305 modified, added, removed, deleted = repo.status()[:4]
306 306 if modified or added or removed or deleted:
307 307 raise error.Abort(_('uncommitted changes'))
308 308 ctx = repo[None]
309 309 for s in sorted(ctx.substate):
310 310 ctx.sub(s).bailifchanged()
311 311
312 312 def logmessage(ui, opts):
313 313 """ get the log message according to -m and -l option """
314 314 message = opts.get('message')
315 315 logfile = opts.get('logfile')
316 316
317 317 if message and logfile:
318 318 raise error.Abort(_('options --message and --logfile are mutually '
319 319 'exclusive'))
320 320 if not message and logfile:
321 321 try:
322 322 if logfile == '-':
323 323 message = ui.fin.read()
324 324 else:
325 325 message = '\n'.join(util.readfile(logfile).splitlines())
326 326 except IOError as inst:
327 327 raise error.Abort(_("can't read commit message '%s': %s") %
328 328 (logfile, inst.strerror))
329 329 return message
330 330
331 331 def mergeeditform(ctxorbool, baseformname):
332 332 """return appropriate editform name (referencing a committemplate)
333 333
334 334 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
335 335 a merge is being committed.
336 336
337 337 This returns baseformname with '.merge' appended if it is a merge,
338 338 otherwise '.normal' is appended.
339 339 """
340 340 if isinstance(ctxorbool, bool):
341 341 if ctxorbool:
342 342 return baseformname + ".merge"
343 343 elif 1 < len(ctxorbool.parents()):
344 344 return baseformname + ".merge"
345 345
346 346 return baseformname + ".normal"
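# Resulting editform names, with a hypothetical base name 'commit':
#   mergeeditform(True, 'commit')   ->  'commit.merge'
#   mergeeditform(False, 'commit')  ->  'commit.normal'
# When a changectx is passed instead of a bool, '.merge' is used only if the
# context has more than one parent.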
347 347
348 348 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
349 349 editform='', **opts):
350 350 """get appropriate commit message editor according to '--edit' option
351 351
352 352 'finishdesc' is a function to be called with the edited commit message
353 353 (= 'description' of the new changeset) just after editing, but
354 354 before checking emptiness. It should return the actual text to be
355 355 stored in history. This allows the description to be changed before
356 356 it is stored.
357 357
358 358 'extramsg' is an extra message to be shown in the editor instead of
359 359 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
360 360 are added automatically.
361 361
362 362 'editform' is a dot-separated list of names, to distinguish
363 363 the purpose of commit text editing.
364 364
365 365 'getcommiteditor' returns 'commitforceeditor' regardless of
366 366 'edit', if either 'finishdesc' or 'extramsg' is specified, because
367 367 they are specific to usage in MQ.
368 368 """
369 369 if edit or finishdesc or extramsg:
370 370 return lambda r, c, s: commitforceeditor(r, c, s,
371 371 finishdesc=finishdesc,
372 372 extramsg=extramsg,
373 373 editform=editform)
374 374 elif editform:
375 375 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
376 376 else:
377 377 return commiteditor
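# Selection sketch (the editform value is hypothetical):
#   getcommiteditor()                           ->  commiteditor itself
#   getcommiteditor(editform='commit.normal')   ->  lambda calling commiteditor
#   getcommiteditor(edit=True)                  ->  lambda calling commitforceeditor,
#                                                   as does passing finishdesc/extramsg
# The returned callable is later invoked with the three arguments the lambdas
# above forward (r, c, s).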
378 378
379 379 def loglimit(opts):
380 380 """get the log limit according to option -l/--limit"""
381 381 limit = opts.get('limit')
382 382 if limit:
383 383 try:
384 384 limit = int(limit)
385 385 except ValueError:
386 386 raise error.Abort(_('limit must be a positive integer'))
387 387 if limit <= 0:
388 388 raise error.Abort(_('limit must be positive'))
389 389 else:
390 390 limit = None
391 391 return limit
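# Behaviour sketch (the opts dicts are hypothetical):
#   loglimit({'limit': '3'})  ->  3
#   loglimit({})              ->  None  (no limit requested)
#   loglimit({'limit': '0'})  ->  raises error.Abort ('limit must be positive')
#   loglimit({'limit': 'x'})  ->  raises error.Abort ('limit must be a positive integer')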
392 392
393 393 def makefilename(repo, pat, node, desc=None,
394 394 total=None, seqno=None, revwidth=None, pathname=None):
395 395 node_expander = {
396 396 'H': lambda: hex(node),
397 397 'R': lambda: str(repo.changelog.rev(node)),
398 398 'h': lambda: short(node),
399 399 'm': lambda: re.sub('[^\w]', '_', str(desc))
400 400 }
401 401 expander = {
402 402 '%': lambda: '%',
403 403 'b': lambda: os.path.basename(repo.root),
404 404 }
405 405
406 406 try:
407 407 if node:
408 408 expander.update(node_expander)
409 409 if node:
410 410 expander['r'] = (lambda:
411 411 str(repo.changelog.rev(node)).zfill(revwidth or 0))
412 412 if total is not None:
413 413 expander['N'] = lambda: str(total)
414 414 if seqno is not None:
415 415 expander['n'] = lambda: str(seqno)
416 416 if total is not None and seqno is not None:
417 417 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
418 418 if pathname is not None:
419 419 expander['s'] = lambda: os.path.basename(pathname)
420 420 expander['d'] = lambda: os.path.dirname(pathname) or '.'
421 421 expander['p'] = lambda: pathname
422 422
423 423 newname = []
424 424 patlen = len(pat)
425 425 i = 0
426 426 while i < patlen:
427 427 c = pat[i]
428 428 if c == '%':
429 429 i += 1
430 430 c = pat[i]
431 431 c = expander[c]()
432 432 newname.append(c)
433 433 i += 1
434 434 return ''.join(newname)
435 435 except KeyError as inst:
436 436 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
437 437 inst.args[0])
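# Expansion sketch with hypothetical values (short node '1234567890ab',
# rev 42, repo root '/home/user/repo'):
#   makefilename(repo, 'hg-%h.patch', node)                         ->  'hg-1234567890ab.patch'
#   makefilename(repo, '%b-%R.patch', node)                         ->  'repo-42.patch'
#   makefilename(repo, '%n-of-%N.patch', node, total=10, seqno=3)   ->  '03-of-10.patch'
# An unrecognized %-spec raises error.Abort ("invalid format spec ... in output
# filename").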
438 438
439 439 def makefileobj(repo, pat, node=None, desc=None, total=None,
440 440 seqno=None, revwidth=None, mode='wb', modemap=None,
441 441 pathname=None):
442 442
443 443 writable = mode not in ('r', 'rb')
444 444
445 445 if not pat or pat == '-':
446 446 if writable:
447 447 fp = repo.ui.fout
448 448 else:
449 449 fp = repo.ui.fin
450 450 if util.safehasattr(fp, 'fileno'):
451 451 return os.fdopen(os.dup(fp.fileno()), mode)
452 452 else:
453 453 # if this fp can't be duped properly, return
454 454 # a dummy object that can be closed
455 455 class wrappedfileobj(object):
456 456 noop = lambda x: None
457 457 def __init__(self, f):
458 458 self.f = f
459 459 def __getattr__(self, attr):
460 460 if attr == 'close':
461 461 return self.noop
462 462 else:
463 463 return getattr(self.f, attr)
464 464
465 465 return wrappedfileobj(fp)
466 466 if util.safehasattr(pat, 'write') and writable:
467 467 return pat
468 468 if util.safehasattr(pat, 'read') and 'r' in mode:
469 469 return pat
470 470 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
471 471 if modemap is not None:
472 472 mode = modemap.get(fn, mode)
473 473 if mode == 'wb':
474 474 modemap[fn] = 'ab'
475 475 return open(fn, mode)
476 476
477 477 def openrevlog(repo, cmd, file_, opts):
478 478 """opens the changelog, manifest, a filelog or a given revlog"""
479 479 cl = opts['changelog']
480 480 mf = opts['manifest']
481 481 dir = opts['dir']
482 482 msg = None
483 483 if cl and mf:
484 484 msg = _('cannot specify --changelog and --manifest at the same time')
485 485 elif cl and dir:
486 486 msg = _('cannot specify --changelog and --dir at the same time')
487 487 elif cl or mf:
488 488 if file_:
489 489 msg = _('cannot specify filename with --changelog or --manifest')
490 490 elif not repo:
491 491 msg = _('cannot specify --changelog or --manifest or --dir '
492 492 'without a repository')
493 493 if msg:
494 494 raise error.Abort(msg)
495 495
496 496 r = None
497 497 if repo:
498 498 if cl:
499 499 r = repo.unfiltered().changelog
500 500 elif dir:
501 501 if 'treemanifest' not in repo.requirements:
502 502 raise error.Abort(_("--dir can only be used on repos with "
503 503 "treemanifest enabled"))
504 504 dirlog = repo.dirlog(file_)
505 505 if len(dirlog):
506 506 r = dirlog
507 507 elif mf:
508 508 r = repo.manifest
509 509 elif file_:
510 510 filelog = repo.file(file_)
511 511 if len(filelog):
512 512 r = filelog
513 513 if not r:
514 514 if not file_:
515 515 raise error.CommandError(cmd, _('invalid arguments'))
516 516 if not os.path.isfile(file_):
517 517 raise error.Abort(_("revlog '%s' not found") % file_)
518 518 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
519 519 file_[:-2] + ".i")
520 520 return r
521 521
522 522 def copy(ui, repo, pats, opts, rename=False):
523 523 # called with the repo lock held
524 524 #
525 525 # hgsep => pathname that uses "/" to separate directories
526 526 # ossep => pathname that uses os.sep to separate directories
527 527 cwd = repo.getcwd()
528 528 targets = {}
529 529 after = opts.get("after")
530 530 dryrun = opts.get("dry_run")
531 531 wctx = repo[None]
532 532
533 533 def walkpat(pat):
534 534 srcs = []
535 535 if after:
536 536 badstates = '?'
537 537 else:
538 538 badstates = '?r'
539 539 m = scmutil.match(repo[None], [pat], opts, globbed=True)
540 540 for abs in repo.walk(m):
541 541 state = repo.dirstate[abs]
542 542 rel = m.rel(abs)
543 543 exact = m.exact(abs)
544 544 if state in badstates:
545 545 if exact and state == '?':
546 546 ui.warn(_('%s: not copying - file is not managed\n') % rel)
547 547 if exact and state == 'r':
548 548 ui.warn(_('%s: not copying - file has been marked for'
549 549 ' remove\n') % rel)
550 550 continue
551 551 # abs: hgsep
552 552 # rel: ossep
553 553 srcs.append((abs, rel, exact))
554 554 return srcs
555 555
556 556 # abssrc: hgsep
557 557 # relsrc: ossep
558 558 # otarget: ossep
559 559 def copyfile(abssrc, relsrc, otarget, exact):
560 560 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
561 561 if '/' in abstarget:
562 562 # We cannot normalize abstarget itself, this would prevent
563 563 # case only renames, like a => A.
564 564 abspath, absname = abstarget.rsplit('/', 1)
565 565 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
566 566 reltarget = repo.pathto(abstarget, cwd)
567 567 target = repo.wjoin(abstarget)
568 568 src = repo.wjoin(abssrc)
569 569 state = repo.dirstate[abstarget]
570 570
571 571 scmutil.checkportable(ui, abstarget)
572 572
573 573 # check for collisions
574 574 prevsrc = targets.get(abstarget)
575 575 if prevsrc is not None:
576 576 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
577 577 (reltarget, repo.pathto(abssrc, cwd),
578 578 repo.pathto(prevsrc, cwd)))
579 579 return
580 580
581 581 # check for overwrites
582 582 exists = os.path.lexists(target)
583 583 samefile = False
584 584 if exists and abssrc != abstarget:
585 585 if (repo.dirstate.normalize(abssrc) ==
586 586 repo.dirstate.normalize(abstarget)):
587 587 if not rename:
588 588 ui.warn(_("%s: can't copy - same file\n") % reltarget)
589 589 return
590 590 exists = False
591 591 samefile = True
592 592
593 593 if not after and exists or after and state in 'mn':
594 594 if not opts['force']:
595 595 ui.warn(_('%s: not overwriting - file exists\n') %
596 596 reltarget)
597 597 return
598 598
599 599 if after:
600 600 if not exists:
601 601 if rename:
602 602 ui.warn(_('%s: not recording move - %s does not exist\n') %
603 603 (relsrc, reltarget))
604 604 else:
605 605 ui.warn(_('%s: not recording copy - %s does not exist\n') %
606 606 (relsrc, reltarget))
607 607 return
608 608 elif not dryrun:
609 609 try:
610 610 if exists:
611 611 os.unlink(target)
612 612 targetdir = os.path.dirname(target) or '.'
613 613 if not os.path.isdir(targetdir):
614 614 os.makedirs(targetdir)
615 615 if samefile:
616 616 tmp = target + "~hgrename"
617 617 os.rename(src, tmp)
618 618 os.rename(tmp, target)
619 619 else:
620 620 util.copyfile(src, target)
621 621 srcexists = True
622 622 except IOError as inst:
623 623 if inst.errno == errno.ENOENT:
624 624 ui.warn(_('%s: deleted in working directory\n') % relsrc)
625 625 srcexists = False
626 626 else:
627 627 ui.warn(_('%s: cannot copy - %s\n') %
628 628 (relsrc, inst.strerror))
629 629 return True # report a failure
630 630
631 631 if ui.verbose or not exact:
632 632 if rename:
633 633 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
634 634 else:
635 635 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
636 636
637 637 targets[abstarget] = abssrc
638 638
639 639 # fix up dirstate
640 640 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
641 641 dryrun=dryrun, cwd=cwd)
642 642 if rename and not dryrun:
643 643 if not after and srcexists and not samefile:
644 644 util.unlinkpath(repo.wjoin(abssrc))
645 645 wctx.forget([abssrc])
646 646
647 647 # pat: ossep
648 648 # dest ossep
649 649 # srcs: list of (hgsep, hgsep, ossep, bool)
650 650 # return: function that takes hgsep and returns ossep
651 651 def targetpathfn(pat, dest, srcs):
652 652 if os.path.isdir(pat):
653 653 abspfx = pathutil.canonpath(repo.root, cwd, pat)
654 654 abspfx = util.localpath(abspfx)
655 655 if destdirexists:
656 656 striplen = len(os.path.split(abspfx)[0])
657 657 else:
658 658 striplen = len(abspfx)
659 659 if striplen:
660 660 striplen += len(os.sep)
661 661 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
662 662 elif destdirexists:
663 663 res = lambda p: os.path.join(dest,
664 664 os.path.basename(util.localpath(p)))
665 665 else:
666 666 res = lambda p: dest
667 667 return res
668 668
669 669 # pat: ossep
670 670 # dest ossep
671 671 # srcs: list of (hgsep, hgsep, ossep, bool)
672 672 # return: function that takes hgsep and returns ossep
673 673 def targetpathafterfn(pat, dest, srcs):
674 674 if matchmod.patkind(pat):
675 675 # a mercurial pattern
676 676 res = lambda p: os.path.join(dest,
677 677 os.path.basename(util.localpath(p)))
678 678 else:
679 679 abspfx = pathutil.canonpath(repo.root, cwd, pat)
680 680 if len(abspfx) < len(srcs[0][0]):
681 681 # A directory. Either the target path contains the last
682 682 # component of the source path or it does not.
683 683 def evalpath(striplen):
684 684 score = 0
685 685 for s in srcs:
686 686 t = os.path.join(dest, util.localpath(s[0])[striplen:])
687 687 if os.path.lexists(t):
688 688 score += 1
689 689 return score
690 690
691 691 abspfx = util.localpath(abspfx)
692 692 striplen = len(abspfx)
693 693 if striplen:
694 694 striplen += len(os.sep)
695 695 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
696 696 score = evalpath(striplen)
697 697 striplen1 = len(os.path.split(abspfx)[0])
698 698 if striplen1:
699 699 striplen1 += len(os.sep)
700 700 if evalpath(striplen1) > score:
701 701 striplen = striplen1
702 702 res = lambda p: os.path.join(dest,
703 703 util.localpath(p)[striplen:])
704 704 else:
705 705 # a file
706 706 if destdirexists:
707 707 res = lambda p: os.path.join(dest,
708 708 os.path.basename(util.localpath(p)))
709 709 else:
710 710 res = lambda p: dest
711 711 return res
712 712
713 713 pats = scmutil.expandpats(pats)
714 714 if not pats:
715 715 raise error.Abort(_('no source or destination specified'))
716 716 if len(pats) == 1:
717 717 raise error.Abort(_('no destination specified'))
718 718 dest = pats.pop()
719 719 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
720 720 if not destdirexists:
721 721 if len(pats) > 1 or matchmod.patkind(pats[0]):
722 722 raise error.Abort(_('with multiple sources, destination must be an '
723 723 'existing directory'))
724 724 if util.endswithsep(dest):
725 725 raise error.Abort(_('destination %s is not a directory') % dest)
726 726
727 727 tfn = targetpathfn
728 728 if after:
729 729 tfn = targetpathafterfn
730 730 copylist = []
731 731 for pat in pats:
732 732 srcs = walkpat(pat)
733 733 if not srcs:
734 734 continue
735 735 copylist.append((tfn(pat, dest, srcs), srcs))
736 736 if not copylist:
737 737 raise error.Abort(_('no files to copy'))
738 738
739 739 errors = 0
740 740 for targetpath, srcs in copylist:
741 741 for abssrc, relsrc, exact in srcs:
742 742 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
743 743 errors += 1
744 744
745 745 if errors:
746 746 ui.warn(_('(consider using --after)\n'))
747 747
748 748 return errors != 0
749 749
750 750 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
751 751 runargs=None, appendpid=False):
752 752 '''Run a command as a service.'''
753 753
754 754 def writepid(pid):
755 755 if opts['pid_file']:
756 756 if appendpid:
757 757 mode = 'a'
758 758 else:
759 759 mode = 'w'
760 760 fp = open(opts['pid_file'], mode)
761 761 fp.write(str(pid) + '\n')
762 762 fp.close()
763 763
764 764 if opts['daemon'] and not opts['daemon_pipefds']:
765 765 # Signal child process startup with file removal
766 766 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
767 767 os.close(lockfd)
768 768 try:
769 769 if not runargs:
770 770 runargs = util.hgcmd() + sys.argv[1:]
771 771 runargs.append('--daemon-pipefds=%s' % lockpath)
772 772 # Don't pass --cwd to the child process, because we've already
773 773 # changed directory.
774 774 for i in xrange(1, len(runargs)):
775 775 if runargs[i].startswith('--cwd='):
776 776 del runargs[i]
777 777 break
778 778 elif runargs[i].startswith('--cwd'):
779 779 del runargs[i:i + 2]
780 780 break
781 781 def condfn():
782 782 return not os.path.exists(lockpath)
783 783 pid = util.rundetached(runargs, condfn)
784 784 if pid < 0:
785 785 raise error.Abort(_('child process failed to start'))
786 786 writepid(pid)
787 787 finally:
788 788 try:
789 789 os.unlink(lockpath)
790 790 except OSError as e:
791 791 if e.errno != errno.ENOENT:
792 792 raise
793 793 if parentfn:
794 794 return parentfn(pid)
795 795 else:
796 796 return
797 797
798 798 if initfn:
799 799 initfn()
800 800
801 801 if not opts['daemon']:
802 802 writepid(os.getpid())
803 803
804 804 if opts['daemon_pipefds']:
805 805 lockpath = opts['daemon_pipefds']
806 806 try:
807 807 os.setsid()
808 808 except AttributeError:
809 809 pass
810 810 os.unlink(lockpath)
811 811 util.hidewindow()
812 812 sys.stdout.flush()
813 813 sys.stderr.flush()
814 814
815 815 nullfd = os.open(os.devnull, os.O_RDWR)
816 816 logfilefd = nullfd
817 817 if logfile:
818 818 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
819 819 os.dup2(nullfd, 0)
820 820 os.dup2(logfilefd, 1)
821 821 os.dup2(logfilefd, 2)
822 822 if nullfd not in (0, 1, 2):
823 823 os.close(nullfd)
824 824 if logfile and logfilefd not in (0, 1, 2):
825 825 os.close(logfilefd)
826 826
827 827 if runfn:
828 828 return runfn()
829 829
830 830 ## facility to let extensions process additional data from an import patch
831 831 # list of identifiers to be executed in order
832 832 extrapreimport = [] # run before commit
833 833 extrapostimport = [] # run after commit
834 834 # mapping from identifier to actual import function
835 835 #
836 836 # 'preimport' are run before the commit is made and are provided the following
837 837 # arguments:
838 838 # - repo: the localrepository instance,
839 839 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
840 840 # - extra: the future extra dictionary of the changeset, please mutate it,
841 841 # - opts: the import options.
842 842 # XXX ideally, we would just pass a ctx ready to be computed, that would allow
843 843 # mutation of in memory commit and more. Feel free to rework the code to get
844 844 # there.
845 845 extrapreimportmap = {}
846 846 # 'postimport' are run after the commit is made and are provided the following
847 847 # argument:
848 848 # - ctx: the changectx created by import.
849 849 extrapostimportmap = {}
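# Registration sketch for an extension (the 'myext' identifier and hook body
# are hypothetical):
#   def preimporthook(repo, patchdata, extra, opts):
#       # stash the nodeid advertised by the patch header, if any
#       extra['myext:source'] = patchdata.get('nodeid') or ''
#   extrapreimport.append('myext')
#   extrapreimportmap['myext'] = preimporthook
# tryimportone() below invokes each registered 'preimport' hook with
# (repo, extractdata, extra, opts) just before committing, and each
# 'postimport' hook with the resulting changectx afterwards.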
850 850
851 851 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
852 852 """Utility function used by commands.import to import a single patch
853 853
854 854 This function is explicitly defined here to help the evolve extension
855 855 wrap this part of the import logic.
856 856
857 857 The API is currently a bit ugly because it is a simple code translation of
858 858 the import command. Feel free to make it better.
859 859
860 860 :hunk: a patch (as a binary string)
861 861 :parents: nodes that will be parent of the created commit
862 862 :opts: the full dict of options passed to the import command
863 863 :msgs: list to save the commit message to.
864 864 (used in case we need to save it when failing)
865 865 :updatefunc: a function that updates a repo to a given node
866 866 updatefunc(<repo>, <node>)
867 867 """
868 868 # avoid cycle context -> subrepo -> cmdutil
869 869 import context
870 870 extractdata = patch.extract(ui, hunk)
871 871 tmpname = extractdata.get('filename')
872 872 message = extractdata.get('message')
873 873 user = extractdata.get('user')
874 874 date = extractdata.get('date')
875 875 branch = extractdata.get('branch')
876 876 nodeid = extractdata.get('nodeid')
877 877 p1 = extractdata.get('p1')
878 878 p2 = extractdata.get('p2')
879 879
880 880 update = not opts.get('bypass')
881 881 strip = opts["strip"]
882 882 prefix = opts["prefix"]
883 883 sim = float(opts.get('similarity') or 0)
884 884 if not tmpname:
885 885 return (None, None, False)
886 886 msg = _('applied to working directory')
887 887
888 888 rejects = False
889 889
890 890 try:
891 891 cmdline_message = logmessage(ui, opts)
892 892 if cmdline_message:
893 893 # pickup the cmdline msg
894 894 message = cmdline_message
895 895 elif message:
896 896 # pickup the patch msg
897 897 message = message.strip()
898 898 else:
899 899 # launch the editor
900 900 message = None
901 901 ui.debug('message:\n%s\n' % message)
902 902
903 903 if len(parents) == 1:
904 904 parents.append(repo[nullid])
905 905 if opts.get('exact'):
906 906 if not nodeid or not p1:
907 907 raise error.Abort(_('not a Mercurial patch'))
908 908 p1 = repo[p1]
909 909 p2 = repo[p2 or nullid]
910 910 elif p2:
911 911 try:
912 912 p1 = repo[p1]
913 913 p2 = repo[p2]
914 914 # Without any options, consider p2 only if the
915 915 # patch is being applied on top of the recorded
916 916 # first parent.
917 917 if p1 != parents[0]:
918 918 p1 = parents[0]
919 919 p2 = repo[nullid]
920 920 except error.RepoError:
921 921 p1, p2 = parents
922 922 if p2.node() == nullid:
923 923 ui.warn(_("warning: import the patch as a normal revision\n"
924 924 "(use --exact to import the patch as a merge)\n"))
925 925 else:
926 926 p1, p2 = parents
927 927
928 928 n = None
929 929 if update:
930 930 if p1 != parents[0]:
931 931 updatefunc(repo, p1.node())
932 932 if p2 != parents[1]:
933 933 repo.setparents(p1.node(), p2.node())
934 934
935 935 if opts.get('exact') or opts.get('import_branch'):
936 936 repo.dirstate.setbranch(branch or 'default')
937 937
938 938 partial = opts.get('partial', False)
939 939 files = set()
940 940 try:
941 941 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
942 942 files=files, eolmode=None, similarity=sim / 100.0)
943 943 except patch.PatchError as e:
944 944 if not partial:
945 945 raise error.Abort(str(e))
946 946 if partial:
947 947 rejects = True
948 948
949 949 files = list(files)
950 950 if opts.get('no_commit'):
951 951 if message:
952 952 msgs.append(message)
953 953 else:
954 954 if opts.get('exact') or p2:
955 955 # If you got here, you either use --force and know what
956 956 # you are doing or used --exact or a merge patch while
957 957 # being updated to its first parent.
958 958 m = None
959 959 else:
960 960 m = scmutil.matchfiles(repo, files or [])
961 961 editform = mergeeditform(repo[None], 'import.normal')
962 962 if opts.get('exact'):
963 963 editor = None
964 964 else:
965 965 editor = getcommiteditor(editform=editform, **opts)
966 966 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
967 967 extra = {}
968 968 for idfunc in extrapreimport:
969 969 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
970 970 try:
971 971 if partial:
972 972 repo.ui.setconfig('ui', 'allowemptycommit', True)
973 973 n = repo.commit(message, opts.get('user') or user,
974 974 opts.get('date') or date, match=m,
975 975 editor=editor, extra=extra)
976 976 for idfunc in extrapostimport:
977 977 extrapostimportmap[idfunc](repo[n])
978 978 finally:
979 979 repo.ui.restoreconfig(allowemptyback)
980 980 else:
981 981 if opts.get('exact') or opts.get('import_branch'):
982 982 branch = branch or 'default'
983 983 else:
984 984 branch = p1.branch()
985 985 store = patch.filestore()
986 986 try:
987 987 files = set()
988 988 try:
989 989 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
990 990 files, eolmode=None)
991 991 except patch.PatchError as e:
992 992 raise error.Abort(str(e))
993 993 if opts.get('exact'):
994 994 editor = None
995 995 else:
996 996 editor = getcommiteditor(editform='import.bypass')
997 997 memctx = context.makememctx(repo, (p1.node(), p2.node()),
998 998 message,
999 999 opts.get('user') or user,
1000 1000 opts.get('date') or date,
1001 1001 branch, files, store,
1002 1002 editor=editor)
1003 1003 n = memctx.commit()
1004 1004 finally:
1005 1005 store.close()
1006 1006 if opts.get('exact') and opts.get('no_commit'):
1007 1007 # --exact with --no-commit is still useful in that it does merge
1008 1008 # and branch bits
1009 1009 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1010 1010 elif opts.get('exact') and hex(n) != nodeid:
1011 1011 raise error.Abort(_('patch is damaged or loses information'))
1012 1012 if n:
1013 1013 # i18n: refers to a short changeset id
1014 1014 msg = _('created %s') % short(n)
1015 1015 return (msg, n, rejects)
1016 1016 finally:
1017 1017 os.unlink(tmpname)
1018 1018
1019 1019 # facility to let extensions include additional data in an exported patch
1020 1020 # list of identifiers to be executed in order
1021 1021 extraexport = []
1022 1022 # mapping from identifier to actual export function
1023 1023 # the function has to return a string to be added to the header, or None
1024 1024 # it is given two arguments (sequencenumber, changectx)
1025 1025 extraexportmap = {}
1026 1026
1027 1027 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1028 1028 opts=None, match=None):
1029 1029 '''export changesets as hg patches.'''
1030 1030
1031 1031 total = len(revs)
1032 1032 revwidth = max([len(str(rev)) for rev in revs])
1033 1033 filemode = {}
1034 1034
1035 1035 def single(rev, seqno, fp):
1036 1036 ctx = repo[rev]
1037 1037 node = ctx.node()
1038 1038 parents = [p.node() for p in ctx.parents() if p]
1039 1039 branch = ctx.branch()
1040 1040 if switch_parent:
1041 1041 parents.reverse()
1042 1042
1043 1043 if parents:
1044 1044 prev = parents[0]
1045 1045 else:
1046 1046 prev = nullid
1047 1047
1048 1048 shouldclose = False
1049 1049 if not fp and len(template) > 0:
1050 1050 desc_lines = ctx.description().rstrip().split('\n')
1051 1051 desc = desc_lines[0] #Commit always has a first line.
1052 1052 fp = makefileobj(repo, template, node, desc=desc, total=total,
1053 1053 seqno=seqno, revwidth=revwidth, mode='wb',
1054 1054 modemap=filemode)
1055 1055 if fp != template:
1056 1056 shouldclose = True
1057 1057 if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
1058 1058 repo.ui.note("%s\n" % fp.name)
1059 1059
1060 1060 if not fp:
1061 1061 write = repo.ui.write
1062 1062 else:
1063 1063 def write(s, **kw):
1064 1064 fp.write(s)
1065 1065
1066 1066 write("# HG changeset patch\n")
1067 1067 write("# User %s\n" % ctx.user())
1068 1068 write("# Date %d %d\n" % ctx.date())
1069 1069 write("# %s\n" % util.datestr(ctx.date()))
1070 1070 if branch and branch != 'default':
1071 1071 write("# Branch %s\n" % branch)
1072 1072 write("# Node ID %s\n" % hex(node))
1073 1073 write("# Parent %s\n" % hex(prev))
1074 1074 if len(parents) > 1:
1075 1075 write("# Parent %s\n" % hex(parents[1]))
1076 1076
1077 1077 for headerid in extraexport:
1078 1078 header = extraexportmap[headerid](seqno, ctx)
1079 1079 if header is not None:
1080 1080 write('# %s\n' % header)
1081 1081 write(ctx.description().rstrip())
1082 1082 write("\n\n")
1083 1083
1084 1084 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1085 1085 write(chunk, label=label)
1086 1086
1087 1087 if shouldclose:
1088 1088 fp.close()
1089 1089
1090 1090 for seqno, rev in enumerate(revs):
1091 1091 single(rev, seqno + 1, fp)
1092 1092
1093 1093 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1094 1094 changes=None, stat=False, fp=None, prefix='',
1095 1095 root='', listsubrepos=False):
1096 1096 '''show diff or diffstat.'''
1097 1097 if fp is None:
1098 1098 write = ui.write
1099 1099 else:
1100 1100 def write(s, **kw):
1101 1101 fp.write(s)
1102 1102
1103 1103 if root:
1104 1104 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1105 1105 else:
1106 1106 relroot = ''
1107 1107 if relroot != '':
1108 1108 # XXX relative roots currently don't work if the root is within a
1109 1109 # subrepo
1110 1110 uirelroot = match.uipath(relroot)
1111 1111 relroot += '/'
1112 1112 for matchroot in match.files():
1113 1113 if not matchroot.startswith(relroot):
1114 1114 ui.warn(_('warning: %s not inside relative root %s\n') % (
1115 1115 match.uipath(matchroot), uirelroot))
1116 1116
1117 1117 if stat:
1118 1118 diffopts = diffopts.copy(context=0)
1119 1119 width = 80
1120 1120 if not ui.plain():
1121 1121 width = ui.termwidth()
1122 1122 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1123 1123 prefix=prefix, relroot=relroot)
1124 1124 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1125 1125 width=width,
1126 1126 git=diffopts.git):
1127 1127 write(chunk, label=label)
1128 1128 else:
1129 1129 for chunk, label in patch.diffui(repo, node1, node2, match,
1130 1130 changes, diffopts, prefix=prefix,
1131 1131 relroot=relroot):
1132 1132 write(chunk, label=label)
1133 1133
1134 1134 if listsubrepos:
1135 1135 ctx1 = repo[node1]
1136 1136 ctx2 = repo[node2]
1137 1137 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1138 1138 tempnode2 = node2
1139 1139 try:
1140 1140 if node2 is not None:
1141 1141 tempnode2 = ctx2.substate[subpath][1]
1142 1142 except KeyError:
1143 1143 # A subrepo that existed in node1 was deleted between node1 and
1144 1144 # node2 (inclusive). Thus, ctx2's substate won't contain that
1145 1145 # subpath. The best we can do is to ignore it.
1146 1146 tempnode2 = None
1147 1147 submatch = matchmod.narrowmatcher(subpath, match)
1148 1148 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1149 1149 stat=stat, fp=fp, prefix=prefix)
1150 1150
1151 1151 class changeset_printer(object):
1152 1152 '''show changeset information when templating not requested.'''
1153 1153
1154 1154 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1155 1155 self.ui = ui
1156 1156 self.repo = repo
1157 1157 self.buffered = buffered
1158 1158 self.matchfn = matchfn
1159 1159 self.diffopts = diffopts
1160 1160 self.header = {}
1161 1161 self.hunk = {}
1162 1162 self.lastheader = None
1163 1163 self.footer = None
1164 1164
1165 1165 def flush(self, ctx):
1166 1166 rev = ctx.rev()
1167 1167 if rev in self.header:
1168 1168 h = self.header[rev]
1169 1169 if h != self.lastheader:
1170 1170 self.lastheader = h
1171 1171 self.ui.write(h)
1172 1172 del self.header[rev]
1173 1173 if rev in self.hunk:
1174 1174 self.ui.write(self.hunk[rev])
1175 1175 del self.hunk[rev]
1176 1176 return 1
1177 1177 return 0
1178 1178
1179 1179 def close(self):
1180 1180 if self.footer:
1181 1181 self.ui.write(self.footer)
1182 1182
1183 1183 def show(self, ctx, copies=None, matchfn=None, **props):
1184 1184 if self.buffered:
1185 1185 self.ui.pushbuffer()
1186 1186 self._show(ctx, copies, matchfn, props)
1187 1187 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
1188 1188 else:
1189 1189 self._show(ctx, copies, matchfn, props)
1190 1190
1191 1191 def _show(self, ctx, copies, matchfn, props):
1192 1192 '''show a single changeset or file revision'''
1193 1193 changenode = ctx.node()
1194 1194 rev = ctx.rev()
1195 1195 if self.ui.debugflag:
1196 1196 hexfunc = hex
1197 1197 else:
1198 1198 hexfunc = short
1199 1199 # as of now, wctx.node() and wctx.rev() return None, but we want to
1200 1200 # show the same values as {node} and {rev} templatekw
1201 1201 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1202 1202
1203 1203 if self.ui.quiet:
1204 1204 self.ui.write("%d:%s\n" % revnode, label='log.node')
1205 1205 return
1206 1206
1207 1207 date = util.datestr(ctx.date())
1208 1208
1209 1209 # i18n: column positioning for "hg log"
1210 1210 self.ui.write(_("changeset: %d:%s\n") % revnode,
1211 1211 label='log.changeset changeset.%s' % ctx.phasestr())
1212 1212
1213 1213 # branches are shown first before any other names due to backwards
1214 1214 # compatibility
1215 1215 branch = ctx.branch()
1216 1216 # don't show the default branch name
1217 1217 if branch != 'default':
1218 1218 # i18n: column positioning for "hg log"
1219 1219 self.ui.write(_("branch: %s\n") % branch,
1220 1220 label='log.branch')
1221 1221
1222 1222 for name, ns in self.repo.names.iteritems():
1223 1223 # branches has special logic already handled above, so here we just
1224 1224 # skip it
1225 1225 if name == 'branches':
1226 1226 continue
1227 1227 # we will use the templatename as the color name since those two
1228 1228 # should be the same
1229 1229 for name in ns.names(self.repo, changenode):
1230 1230 self.ui.write(ns.logfmt % name,
1231 1231 label='log.%s' % ns.colorname)
1232 1232 if self.ui.debugflag:
1233 1233 # i18n: column positioning for "hg log"
1234 1234 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1235 1235 label='log.phase')
1236 1236 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1237 1237 label = 'log.parent changeset.%s' % pctx.phasestr()
1238 1238 # i18n: column positioning for "hg log"
1239 1239 self.ui.write(_("parent: %d:%s\n")
1240 1240 % (pctx.rev(), hexfunc(pctx.node())),
1241 1241 label=label)
1242 1242
1243 1243 if self.ui.debugflag and rev is not None:
1244 1244 mnode = ctx.manifestnode()
1245 1245 # i18n: column positioning for "hg log"
1246 1246 self.ui.write(_("manifest: %d:%s\n") %
1247 1247 (self.repo.manifest.rev(mnode), hex(mnode)),
1248 1248 label='ui.debug log.manifest')
1249 1249 # i18n: column positioning for "hg log"
1250 1250 self.ui.write(_("user: %s\n") % ctx.user(),
1251 1251 label='log.user')
1252 1252 # i18n: column positioning for "hg log"
1253 1253 self.ui.write(_("date: %s\n") % date,
1254 1254 label='log.date')
1255 1255
1256 1256 if self.ui.debugflag:
1257 1257 files = ctx.p1().status(ctx)[:3]
1258 1258 for key, value in zip([# i18n: column positioning for "hg log"
1259 1259 _("files:"),
1260 1260 # i18n: column positioning for "hg log"
1261 1261 _("files+:"),
1262 1262 # i18n: column positioning for "hg log"
1263 1263 _("files-:")], files):
1264 1264 if value:
1265 1265 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1266 1266 label='ui.debug log.files')
1267 1267 elif ctx.files() and self.ui.verbose:
1268 1268 # i18n: column positioning for "hg log"
1269 1269 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1270 1270 label='ui.note log.files')
1271 1271 if copies and self.ui.verbose:
1272 1272 copies = ['%s (%s)' % c for c in copies]
1273 1273 # i18n: column positioning for "hg log"
1274 1274 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1275 1275 label='ui.note log.copies')
1276 1276
1277 1277 extra = ctx.extra()
1278 1278 if extra and self.ui.debugflag:
1279 1279 for key, value in sorted(extra.items()):
1280 1280 # i18n: column positioning for "hg log"
1281 1281 self.ui.write(_("extra: %s=%s\n")
1282 1282 % (key, value.encode('string_escape')),
1283 1283 label='ui.debug log.extra')
1284 1284
1285 1285 description = ctx.description().strip()
1286 1286 if description:
1287 1287 if self.ui.verbose:
1288 1288 self.ui.write(_("description:\n"),
1289 1289 label='ui.note log.description')
1290 1290 self.ui.write(description,
1291 1291 label='ui.note log.description')
1292 1292 self.ui.write("\n\n")
1293 1293 else:
1294 1294 # i18n: column positioning for "hg log"
1295 1295 self.ui.write(_("summary: %s\n") %
1296 1296 description.splitlines()[0],
1297 1297 label='log.summary')
1298 1298 self.ui.write("\n")
1299 1299
1300 1300 self.showpatch(changenode, matchfn)
1301 1301
1302 1302 def showpatch(self, node, matchfn):
1303 1303 if not matchfn:
1304 1304 matchfn = self.matchfn
1305 1305 if matchfn:
1306 1306 stat = self.diffopts.get('stat')
1307 1307 diff = self.diffopts.get('patch')
1308 1308 diffopts = patch.diffallopts(self.ui, self.diffopts)
1309 1309 prev = self.repo.changelog.parents(node)[0]
1310 1310 if stat:
1311 1311 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1312 1312 match=matchfn, stat=True)
1313 1313 if diff:
1314 1314 if stat:
1315 1315 self.ui.write("\n")
1316 1316 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1317 1317 match=matchfn, stat=False)
1318 1318 self.ui.write("\n")
1319 1319
1320 1320 class jsonchangeset(changeset_printer):
1321 1321 '''format changeset information.'''
1322 1322
1323 1323 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1324 1324 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1325 1325 self.cache = {}
1326 1326 self._first = True
1327 1327
1328 1328 def close(self):
1329 1329 if not self._first:
1330 1330 self.ui.write("\n]\n")
1331 1331 else:
1332 1332 self.ui.write("[]\n")
1333 1333
1334 1334 def _show(self, ctx, copies, matchfn, props):
1335 1335 '''show a single changeset or file revision'''
1336 1336 rev = ctx.rev()
1337 1337 if rev is None:
1338 1338 jrev = jnode = 'null'
1339 1339 else:
1340 1340 jrev = str(rev)
1341 1341 jnode = '"%s"' % hex(ctx.node())
1342 1342 j = encoding.jsonescape
1343 1343
1344 1344 if self._first:
1345 1345 self.ui.write("[\n {")
1346 1346 self._first = False
1347 1347 else:
1348 1348 self.ui.write(",\n {")
1349 1349
1350 1350 if self.ui.quiet:
1351 1351 self.ui.write('\n "rev": %s' % jrev)
1352 1352 self.ui.write(',\n "node": %s' % jnode)
1353 1353 self.ui.write('\n }')
1354 1354 return
1355 1355
1356 1356 self.ui.write('\n "rev": %s' % jrev)
1357 1357 self.ui.write(',\n "node": %s' % jnode)
1358 1358 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1359 1359 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1360 1360 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1361 1361 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1362 1362 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1363 1363
1364 1364 self.ui.write(',\n "bookmarks": [%s]' %
1365 1365 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1366 1366 self.ui.write(',\n "tags": [%s]' %
1367 1367 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1368 1368 self.ui.write(',\n "parents": [%s]' %
1369 1369 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1370 1370
1371 1371 if self.ui.debugflag:
1372 1372 if rev is None:
1373 1373 jmanifestnode = 'null'
1374 1374 else:
1375 1375 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1376 1376 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1377 1377
1378 1378 self.ui.write(',\n "extra": {%s}' %
1379 1379 ", ".join('"%s": "%s"' % (j(k), j(v))
1380 1380 for k, v in ctx.extra().items()))
1381 1381
1382 1382 files = ctx.p1().status(ctx)
1383 1383 self.ui.write(',\n "modified": [%s]' %
1384 1384 ", ".join('"%s"' % j(f) for f in files[0]))
1385 1385 self.ui.write(',\n "added": [%s]' %
1386 1386 ", ".join('"%s"' % j(f) for f in files[1]))
1387 1387 self.ui.write(',\n "removed": [%s]' %
1388 1388 ", ".join('"%s"' % j(f) for f in files[2]))
1389 1389
1390 1390 elif self.ui.verbose:
1391 1391 self.ui.write(',\n "files": [%s]' %
1392 1392 ", ".join('"%s"' % j(f) for f in ctx.files()))
1393 1393
1394 1394 if copies:
1395 1395 self.ui.write(',\n "copies": {%s}' %
1396 1396 ", ".join('"%s": "%s"' % (j(k), j(v))
1397 1397 for k, v in copies))
1398 1398
1399 1399 matchfn = self.matchfn
1400 1400 if matchfn:
1401 1401 stat = self.diffopts.get('stat')
1402 1402 diff = self.diffopts.get('patch')
1403 1403 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1404 1404 node, prev = ctx.node(), ctx.p1().node()
1405 1405 if stat:
1406 1406 self.ui.pushbuffer()
1407 1407 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1408 1408 match=matchfn, stat=True)
1409 1409 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1410 1410 if diff:
1411 1411 self.ui.pushbuffer()
1412 1412 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1413 1413 match=matchfn, stat=False)
1414 1414 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1415 1415
1416 1416 self.ui.write("\n }")
1417 1417
1418 1418 class changeset_templater(changeset_printer):
1419 1419 '''format changeset information.'''
1420 1420
1421 1421 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1422 1422 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1423 1423 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1424 1424 defaulttempl = {
1425 1425 'parent': '{rev}:{node|formatnode} ',
1426 1426 'manifest': '{rev}:{node|formatnode}',
1427 1427 'file_copy': '{name} ({source})',
1428 1428 'extra': '{key}={value|stringescape}'
1429 1429 }
1430 1430 # filecopy is preserved for compatibility reasons
1431 1431 defaulttempl['filecopy'] = defaulttempl['file_copy']
1432 1432 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1433 1433 cache=defaulttempl)
1434 1434 if tmpl:
1435 1435 self.t.cache['changeset'] = tmpl
1436 1436
1437 1437 self.cache = {}
1438 1438
1439 1439 # find correct templates for current mode
1440 1440 tmplmodes = [
1441 1441 (True, None),
1442 1442 (self.ui.verbose, 'verbose'),
1443 1443 (self.ui.quiet, 'quiet'),
1444 1444 (self.ui.debugflag, 'debug'),
1445 1445 ]
1446 1446
1447 1447 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1448 1448 'docheader': '', 'docfooter': ''}
1449 1449 for mode, postfix in tmplmodes:
1450 1450 for t in self._parts:
1451 1451 cur = t
1452 1452 if postfix:
1453 1453 cur += "_" + postfix
1454 1454 if mode and cur in self.t:
1455 1455 self._parts[t] = cur
1456 1456
1457 1457 if self._parts['docheader']:
1458 1458 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1459 1459
1460 1460 def close(self):
1461 1461 if self._parts['docfooter']:
1462 1462 if not self.footer:
1463 1463 self.footer = ""
1464 1464 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1465 1465 return super(changeset_templater, self).close()
1466 1466
1467 1467 def _show(self, ctx, copies, matchfn, props):
1468 1468 '''show a single changeset or file revision'''
1469 1469 props = props.copy()
1470 1470 props.update(templatekw.keywords)
1471 1471 props['templ'] = self.t
1472 1472 props['ctx'] = ctx
1473 1473 props['repo'] = self.repo
1474 1474 props['revcache'] = {'copies': copies}
1475 1475 props['cache'] = self.cache
1476 1476
1477 1477 try:
1478 1478 # write header
1479 1479 if self._parts['header']:
1480 1480 h = templater.stringify(self.t(self._parts['header'], **props))
1481 1481 if self.buffered:
1482 1482 self.header[ctx.rev()] = h
1483 1483 else:
1484 1484 if self.lastheader != h:
1485 1485 self.lastheader = h
1486 1486 self.ui.write(h)
1487 1487
1488 1488 # write changeset metadata, then patch if requested
1489 1489 key = self._parts['changeset']
1490 1490 self.ui.write(templater.stringify(self.t(key, **props)))
1491 1491 self.showpatch(ctx.node(), matchfn)
1492 1492
1493 1493 if self._parts['footer']:
1494 1494 if not self.footer:
1495 1495 self.footer = templater.stringify(
1496 1496 self.t(self._parts['footer'], **props))
1497 1497 except KeyError as inst:
1498 1498 msg = _("%s: no key named '%s'")
1499 1499 raise error.Abort(msg % (self.t.mapfile, inst.args[0]))
1500 1500 except SyntaxError as inst:
1501 1501 raise error.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1502 1502
1503 1503 def gettemplate(ui, tmpl, style):
1504 1504 """
1505 1505 Find the template matching the given template spec or style.
1506 1506 """
1507 1507
1508 1508 # ui settings
1509 1509 if not tmpl and not style: # template are stronger than style
1510 1510 tmpl = ui.config('ui', 'logtemplate')
1511 1511 if tmpl:
1512 1512 try:
1513 1513 tmpl = templater.unquotestring(tmpl)
1514 1514 except SyntaxError:
1515 1515 pass
1516 1516 return tmpl, None
1517 1517 else:
1518 1518 style = util.expandpath(ui.config('ui', 'style', ''))
1519 1519
1520 1520 if not tmpl and style:
1521 1521 mapfile = style
1522 1522 if not os.path.split(mapfile)[0]:
1523 1523 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1524 1524 or templater.templatepath(mapfile))
1525 1525 if mapname:
1526 1526 mapfile = mapname
1527 1527 return None, mapfile
1528 1528
1529 1529 if not tmpl:
1530 1530 return None, None
1531 1531
1532 1532 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1533 1533
1534 1534 def show_changeset(ui, repo, opts, buffered=False):
1535 1535 """show one changeset using template or regular display.
1536 1536
1537 1537 Display format will be the first non-empty hit of:
1538 1538 1. option 'template'
1539 1539 2. option 'style'
1540 1540 3. [ui] setting 'logtemplate'
1541 1541 4. [ui] setting 'style'
1542 1542 If all of these values are either unset or the empty string,
1543 1543 regular display via changeset_printer() is done.
1544 1544 """
1545 1545 # options
1546 1546 matchfn = None
1547 1547 if opts.get('patch') or opts.get('stat'):
1548 1548 matchfn = scmutil.matchall(repo)
1549 1549
1550 1550 if opts.get('template') == 'json':
1551 1551 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1552 1552
1553 1553 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1554 1554
1555 1555 if not tmpl and not mapfile:
1556 1556 return changeset_printer(ui, repo, matchfn, opts, buffered)
1557 1557
1558 1558 try:
1559 1559 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1560 1560 buffered)
1561 1561 except SyntaxError as inst:
1562 1562 raise error.Abort(inst.args[0])
1563 1563 return t
1564 1564
1565 1565 def showmarker(ui, marker):
1566 1566 """utility function to display an obsolescence marker in a readable way
1567 1567
1568 1568 To be used by debug functions."""
1569 1569 ui.write(hex(marker.precnode()))
1570 1570 for repl in marker.succnodes():
1571 1571 ui.write(' ')
1572 1572 ui.write(hex(repl))
1573 1573 ui.write(' %X ' % marker.flags())
1574 1574 parents = marker.parentnodes()
1575 1575 if parents is not None:
1576 1576 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1577 1577 ui.write('(%s) ' % util.datestr(marker.date()))
1578 1578 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1579 1579 sorted(marker.metadata().items())
1580 1580 if t[0] != 'date')))
1581 1581 ui.write('\n')
1582 1582
1583 1583 def finddate(ui, repo, date):
1584 1584 """Find the tipmost changeset that matches the given date spec"""
1585 1585
1586 1586 df = util.matchdate(date)
1587 1587 m = scmutil.matchall(repo)
1588 1588 results = {}
1589 1589
1590 1590 def prep(ctx, fns):
1591 1591 d = ctx.date()
1592 1592 if df(d[0]):
1593 1593 results[ctx.rev()] = d
1594 1594
1595 1595 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1596 1596 rev = ctx.rev()
1597 1597 if rev in results:
1598 1598 ui.status(_("found revision %s from %s\n") %
1599 1599 (rev, util.datestr(results[rev])))
1600 1600 return str(rev)
1601 1601
1602 1602 raise error.Abort(_("revision matching date not found"))
1603 1603
1604 1604 def increasingwindows(windowsize=8, sizelimit=512):
1605 1605 while True:
1606 1606 yield windowsize
1607 1607 if windowsize < sizelimit:
1608 1608 windowsize *= 2
1609 1609
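# Informal example (not in the original module): with the default arguments,
# increasingwindows() doubles the window until it reaches the size limit and
# then keeps yielding that limit, e.g.
#
#   import itertools
#   list(itertools.islice(increasingwindows(), 9))
#   # -> [8, 16, 32, 64, 128, 256, 512, 512, 512]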
1610 1610 class FileWalkError(Exception):
1611 1611 pass
1612 1612
1613 1613 def walkfilerevs(repo, match, follow, revs, fncache):
1614 1614 '''Walks the file history for the matched files.
1615 1615
1616 1616 Returns the changeset revs that are involved in the file history.
1617 1617
1618 1618 Throws FileWalkError if the file history can't be walked using
1619 1619 filelogs alone.
1620 1620 '''
1621 1621 wanted = set()
1622 1622 copies = []
1623 1623 minrev, maxrev = min(revs), max(revs)
1624 1624 def filerevgen(filelog, last):
1625 1625 """
1626 1626 Only files, no patterns. Check the history of each file.
1627 1627
1628 1628 Examines filelog entries within the [minrev, maxrev] linkrev range and
1629 1629 returns an iterator yielding (linkrev, parentlinkrevs, copied)
1630 1630 tuples in reverse (newest-first) order.
1631 1631 """
1632 1632 cl_count = len(repo)
1633 1633 revs = []
1634 1634 for j in xrange(0, last + 1):
1635 1635 linkrev = filelog.linkrev(j)
1636 1636 if linkrev < minrev:
1637 1637 continue
1638 1638 # only yield revs for which we have the changelog; it can
1639 1639 # happen while doing "hg log" during a pull or commit
1640 1640 if linkrev >= cl_count:
1641 1641 break
1642 1642
1643 1643 parentlinkrevs = []
1644 1644 for p in filelog.parentrevs(j):
1645 1645 if p != nullrev:
1646 1646 parentlinkrevs.append(filelog.linkrev(p))
1647 1647 n = filelog.node(j)
1648 1648 revs.append((linkrev, parentlinkrevs,
1649 1649 follow and filelog.renamed(n)))
1650 1650
1651 1651 return reversed(revs)
1652 1652 def iterfiles():
1653 1653 pctx = repo['.']
1654 1654 for filename in match.files():
1655 1655 if follow:
1656 1656 if filename not in pctx:
1657 1657 raise error.Abort(_('cannot follow file not in parent '
1658 1658 'revision: "%s"') % filename)
1659 1659 yield filename, pctx[filename].filenode()
1660 1660 else:
1661 1661 yield filename, None
1662 1662 for filename_node in copies:
1663 1663 yield filename_node
1664 1664
1665 1665 for file_, node in iterfiles():
1666 1666 filelog = repo.file(file_)
1667 1667 if not len(filelog):
1668 1668 if node is None:
1669 1669 # A zero count may be a directory or deleted file, so
1670 1670 # try to find matching entries on the slow path.
1671 1671 if follow:
1672 1672 raise error.Abort(
1673 1673 _('cannot follow nonexistent file: "%s"') % file_)
1674 1674 raise FileWalkError("Cannot walk via filelog")
1675 1675 else:
1676 1676 continue
1677 1677
1678 1678 if node is None:
1679 1679 last = len(filelog) - 1
1680 1680 else:
1681 1681 last = filelog.rev(node)
1682 1682
1683 1683 # keep track of all ancestors of the file
1684 1684 ancestors = set([filelog.linkrev(last)])
1685 1685
1686 1686 # iterate from latest to oldest revision
1687 1687 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1688 1688 if not follow:
1689 1689 if rev > maxrev:
1690 1690 continue
1691 1691 else:
1692 1692 # Note that last might not be the first interesting
1693 1693 # rev to us:
1694 1694 # if the file has been changed after maxrev, we'll
1695 1695 # have linkrev(last) > maxrev, and we still need
1696 1696 # to explore the file graph
1697 1697 if rev not in ancestors:
1698 1698 continue
1699 1699 # XXX insert 1327 fix here
1700 1700 if flparentlinkrevs:
1701 1701 ancestors.update(flparentlinkrevs)
1702 1702
1703 1703 fncache.setdefault(rev, []).append(file_)
1704 1704 wanted.add(rev)
1705 1705 if copied:
1706 1706 copies.append(copied)
1707 1707
1708 1708 return wanted
1709 1709
1710 1710 class _followfilter(object):
1711 1711 def __init__(self, repo, onlyfirst=False):
1712 1712 self.repo = repo
1713 1713 self.startrev = nullrev
1714 1714 self.roots = set()
1715 1715 self.onlyfirst = onlyfirst
1716 1716
1717 1717 def match(self, rev):
1718 1718 def realparents(rev):
1719 1719 if self.onlyfirst:
1720 1720 return self.repo.changelog.parentrevs(rev)[0:1]
1721 1721 else:
1722 1722 return filter(lambda x: x != nullrev,
1723 1723 self.repo.changelog.parentrevs(rev))
1724 1724
1725 1725 if self.startrev == nullrev:
1726 1726 self.startrev = rev
1727 1727 return True
1728 1728
1729 1729 if rev > self.startrev:
1730 1730 # forward: all descendants
1731 1731 if not self.roots:
1732 1732 self.roots.add(self.startrev)
1733 1733 for parent in realparents(rev):
1734 1734 if parent in self.roots:
1735 1735 self.roots.add(rev)
1736 1736 return True
1737 1737 else:
1738 1738 # backwards: all parents
1739 1739 if not self.roots:
1740 1740 self.roots.update(realparents(self.startrev))
1741 1741 if rev in self.roots:
1742 1742 self.roots.remove(rev)
1743 1743 self.roots.update(realparents(rev))
1744 1744 return True
1745 1745
1746 1746 return False
1747 1747
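# Usage sketch (hypothetical revisions, for illustration only): the first rev
# fed to match() becomes the root; when iterating upward, later revs match
# only if one of their parents already matched (i.e. descendants of the root),
# and when iterating downward the filter follows ancestors instead.
#
#   ff = _followfilter(repo, onlyfirst=False)
#   descendants = [r for r in repo.revs('5:tip') if ff.match(r)]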
1748 1748 def walkchangerevs(repo, match, opts, prepare):
1749 1749 '''Iterate over files and the revs in which they changed.
1750 1750
1751 1751 Callers most commonly need to iterate backwards over the history
1752 1752 in which they are interested. Doing so has awful (quadratic-looking)
1753 1753 performance, so we use iterators in a "windowed" way.
1754 1754
1755 1755 We walk a window of revisions in the desired order. Within the
1756 1756 window, we first walk forwards to gather data, then in the desired
1757 1757 order (usually backwards) to display it.
1758 1758
1759 1759 This function returns an iterator yielding contexts. Before
1760 1760 yielding each context, the iterator will first call the prepare
1761 1761 function on each context in the window in forward order.'''
1762 1762
1763 1763 follow = opts.get('follow') or opts.get('follow_first')
1764 1764 revs = _logrevs(repo, opts)
1765 1765 if not revs:
1766 1766 return []
1767 1767 wanted = set()
1768 1768 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1769 1769 opts.get('removed'))
1770 1770 fncache = {}
1771 1771 change = repo.changectx
1772 1772
1773 1773 # First step is to fill wanted, the set of revisions that we want to yield.
1774 1774 # When it does not induce extra cost, we also fill fncache for revisions in
1775 1775 # wanted: a cache of filenames that were changed (ctx.files()) and that
1776 1776 # match the file filtering conditions.
1777 1777
1778 1778 if match.always():
1779 1779 # No files, no patterns. Display all revs.
1780 1780 wanted = revs
1781 1781 elif not slowpath:
1782 1782 # We only have to read through the filelog to find wanted revisions
1783 1783
1784 1784 try:
1785 1785 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1786 1786 except FileWalkError:
1787 1787 slowpath = True
1788 1788
1789 1789 # We decided to fall back to the slowpath because at least one
1790 1790 # of the paths was not a file. Check to see if at least one of them
1791 1791 # existed in history, otherwise simply return
1792 1792 for path in match.files():
1793 1793 if path == '.' or path in repo.store:
1794 1794 break
1795 1795 else:
1796 1796 return []
1797 1797
1798 1798 if slowpath:
1799 1799 # We have to read the changelog to match filenames against
1800 1800 # changed files
1801 1801
1802 1802 if follow:
1803 1803 raise error.Abort(_('can only follow copies/renames for explicit '
1804 1804 'filenames'))
1805 1805
1806 1806 # The slow path checks files modified in every changeset.
1807 1807 # This is really slow on large repos, so compute the set lazily.
1808 1808 class lazywantedset(object):
1809 1809 def __init__(self):
1810 1810 self.set = set()
1811 1811 self.revs = set(revs)
1812 1812
1813 1813 # No need to worry about locality here because it will be accessed
1814 1814 # in the same order as the increasing window below.
1815 1815 def __contains__(self, value):
1816 1816 if value in self.set:
1817 1817 return True
1818 1818 elif value not in self.revs:
1819 1819 return False
1820 1820 else:
1821 1821 self.revs.discard(value)
1822 1822 ctx = change(value)
1823 1823 matches = filter(match, ctx.files())
1824 1824 if matches:
1825 1825 fncache[value] = matches
1826 1826 self.set.add(value)
1827 1827 return True
1828 1828 return False
1829 1829
1830 1830 def discard(self, value):
1831 1831 self.revs.discard(value)
1832 1832 self.set.discard(value)
1833 1833
1834 1834 wanted = lazywantedset()
1835 1835
1836 1836 # it might be worthwhile to do this in the iterator if the rev range
1837 1837 # is descending and the prune args are all within that range
1838 1838 for rev in opts.get('prune', ()):
1839 1839 rev = repo[rev].rev()
1840 1840 ff = _followfilter(repo)
1841 1841 stop = min(revs[0], revs[-1])
1842 1842 for x in xrange(rev, stop - 1, -1):
1843 1843 if ff.match(x):
1844 1844 wanted = wanted - [x]
1845 1845
1846 1846 # Now that wanted is correctly initialized, we can iterate over the
1847 1847 # revision range, yielding only revisions in wanted.
1848 1848 def iterate():
1849 1849 if follow and match.always():
1850 1850 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1851 1851 def want(rev):
1852 1852 return ff.match(rev) and rev in wanted
1853 1853 else:
1854 1854 def want(rev):
1855 1855 return rev in wanted
1856 1856
1857 1857 it = iter(revs)
1858 1858 stopiteration = False
1859 1859 for windowsize in increasingwindows():
1860 1860 nrevs = []
1861 1861 for i in xrange(windowsize):
1862 1862 rev = next(it, None)
1863 1863 if rev is None:
1864 1864 stopiteration = True
1865 1865 break
1866 1866 elif want(rev):
1867 1867 nrevs.append(rev)
1868 1868 for rev in sorted(nrevs):
1869 1869 fns = fncache.get(rev)
1870 1870 ctx = change(rev)
1871 1871 if not fns:
1872 1872 def fns_generator():
1873 1873 for f in ctx.files():
1874 1874 if match(f):
1875 1875 yield f
1876 1876 fns = fns_generator()
1877 1877 prepare(ctx, fns)
1878 1878 for rev in nrevs:
1879 1879 yield change(rev)
1880 1880
1881 1881 if stopiteration:
1882 1882 break
1883 1883
1884 1884 return iterate()
1885 1885
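# Caller sketch (assumed opts; mirrors the finddate() usage above): prepare()
# is invoked on every context of a window in forward order before the
# contexts are yielded in the requested (usually reverse) order.
#
#   def prep(ctx, fns):
#       pass  # gather per-window data here
#   for ctx in walkchangerevs(repo, scmutil.matchall(repo), {'rev': None}, prep):
#       ui.write('%d\n' % ctx.rev())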
1886 1886 def _makefollowlogfilematcher(repo, files, followfirst):
1887 1887 # When displaying a revision with --patch --follow FILE, we have
1888 1888 # to know which file of the revision must be diffed. With
1889 1889 # --follow, we want the names of the ancestors of FILE in the
1890 1890 # revision, stored in "fcache". "fcache" is populated by
1891 1891 # reproducing the graph traversal already done by --follow revset
1892 1892 # and relating linkrevs to file names (which is not "correct" but
1893 1893 # good enough).
1894 1894 fcache = {}
1895 1895 fcacheready = [False]
1896 1896 pctx = repo['.']
1897 1897
1898 1898 def populate():
1899 1899 for fn in files:
1900 1900 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1901 1901 for c in i:
1902 1902 fcache.setdefault(c.linkrev(), set()).add(c.path())
1903 1903
1904 1904 def filematcher(rev):
1905 1905 if not fcacheready[0]:
1906 1906 # Lazy initialization
1907 1907 fcacheready[0] = True
1908 1908 populate()
1909 1909 return scmutil.matchfiles(repo, fcache.get(rev, []))
1910 1910
1911 1911 return filematcher
1912 1912
1913 1913 def _makenofollowlogfilematcher(repo, pats, opts):
1914 1914 '''hook for extensions to override the filematcher for non-follow cases'''
1915 1915 return None
1916 1916
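# Hook sketch (hypothetical extension code, not shipped with this module): an
# extension can wrap this hook to supply its own per-revision matcher;
# returning None keeps the "lambda rev: match" fallback used by
# _makelogrevset() below.
#
#   def widenmatcher(orig, repo, pats, opts):
#       return orig(repo, pats, opts)  # or return a rev -> matcher callable
#   extensions.wrapfunction(cmdutil, '_makenofollowlogfilematcher', widenmatcher)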
1917 1917 def _makelogrevset(repo, pats, opts, revs):
1918 1918 """Return (expr, filematcher) where expr is a revset string built
1919 1919 from log options and file patterns or None. If --stat or --patch
1920 1920 are not passed filematcher is None. Otherwise it is a callable
1921 1921 taking a revision number and returning a match object filtering
1922 1922 the files to be detailed when displaying the revision.
1923 1923 """
1924 1924 opt2revset = {
1925 1925 'no_merges': ('not merge()', None),
1926 1926 'only_merges': ('merge()', None),
1927 1927 '_ancestors': ('ancestors(%(val)s)', None),
1928 1928 '_fancestors': ('_firstancestors(%(val)s)', None),
1929 1929 '_descendants': ('descendants(%(val)s)', None),
1930 1930 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1931 1931 '_matchfiles': ('_matchfiles(%(val)s)', None),
1932 1932 'date': ('date(%(val)r)', None),
1933 1933 'branch': ('branch(%(val)r)', ' or '),
1934 1934 '_patslog': ('filelog(%(val)r)', ' or '),
1935 1935 '_patsfollow': ('follow(%(val)r)', ' or '),
1936 1936 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1937 1937 'keyword': ('keyword(%(val)r)', ' or '),
1938 1938 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1939 1939 'user': ('user(%(val)r)', ' or '),
1940 1940 }
1941 1941
1942 1942 opts = dict(opts)
1943 1943 # follow or not follow?
1944 1944 follow = opts.get('follow') or opts.get('follow_first')
1945 1945 if opts.get('follow_first'):
1946 1946 followfirst = 1
1947 1947 else:
1948 1948 followfirst = 0
1949 1949 # --follow with FILE behavior depends on revs...
1950 1950 it = iter(revs)
1951 1951 startrev = it.next()
1952 1952 followdescendants = startrev < next(it, startrev)
1953 1953
1954 1954 # branch and only_branch are really aliases and must be handled at
1955 1955 # the same time
1956 1956 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1957 1957 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1958 1958 # pats/include/exclude are passed to match.match() directly in
1959 1959 # _matchfiles() revset but walkchangerevs() builds its matcher with
1960 1960 # scmutil.match(). The difference is that input pats are globbed on
1961 1961 # platforms without shell expansion (windows).
1962 1962 wctx = repo[None]
1963 1963 match, pats = scmutil.matchandpats(wctx, pats, opts)
1964 1964 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1965 1965 opts.get('removed'))
1966 1966 if not slowpath:
1967 1967 for f in match.files():
1968 1968 if follow and f not in wctx:
1969 1969 # If the file exists, it may be a directory, so let it
1970 1970 # take the slow path.
1971 1971 if os.path.exists(repo.wjoin(f)):
1972 1972 slowpath = True
1973 1973 continue
1974 1974 else:
1975 1975 raise error.Abort(_('cannot follow file not in parent '
1976 1976 'revision: "%s"') % f)
1977 1977 filelog = repo.file(f)
1978 1978 if not filelog:
1979 1979 # A zero count may be a directory or deleted file, so
1980 1980 # try to find matching entries on the slow path.
1981 1981 if follow:
1982 1982 raise error.Abort(
1983 1983 _('cannot follow nonexistent file: "%s"') % f)
1984 1984 slowpath = True
1985 1985
1986 1986 # We decided to fall back to the slowpath because at least one
1987 1987 # of the paths was not a file. Check to see if at least one of them
1988 1988 # existed in history - in that case, we'll continue down the
1989 1989 # slowpath; otherwise, we can turn off the slowpath
1990 1990 if slowpath:
1991 1991 for path in match.files():
1992 1992 if path == '.' or path in repo.store:
1993 1993 break
1994 1994 else:
1995 1995 slowpath = False
1996 1996
1997 1997 fpats = ('_patsfollow', '_patsfollowfirst')
1998 1998 fnopats = (('_ancestors', '_fancestors'),
1999 1999 ('_descendants', '_fdescendants'))
2000 2000 if slowpath:
2001 2001 # See walkchangerevs() slow path.
2002 2002 #
2003 2003 # pats/include/exclude cannot be represented as separate
2004 2004 # revset expressions as their filtering logic applies at file
2005 2005 # level. For instance "-I a -X a" matches a revision touching
2006 2006 # "a" and "b" while "file(a) and not file(b)" does
2007 2007 # not. Besides, filesets are evaluated against the working
2008 2008 # directory.
2009 2009 matchargs = ['r:', 'd:relpath']
2010 2010 for p in pats:
2011 2011 matchargs.append('p:' + p)
2012 2012 for p in opts.get('include', []):
2013 2013 matchargs.append('i:' + p)
2014 2014 for p in opts.get('exclude', []):
2015 2015 matchargs.append('x:' + p)
2016 2016 matchargs = ','.join(('%r' % p) for p in matchargs)
2017 2017 opts['_matchfiles'] = matchargs
2018 2018 if follow:
2019 2019 opts[fnopats[0][followfirst]] = '.'
2020 2020 else:
2021 2021 if follow:
2022 2022 if pats:
2023 2023 # follow() revset interprets its file argument as a
2024 2024 # manifest entry, so use match.files(), not pats.
2025 2025 opts[fpats[followfirst]] = list(match.files())
2026 2026 else:
2027 2027 op = fnopats[followdescendants][followfirst]
2028 2028 opts[op] = 'rev(%d)' % startrev
2029 2029 else:
2030 2030 opts['_patslog'] = list(pats)
2031 2031
2032 2032 filematcher = None
2033 2033 if opts.get('patch') or opts.get('stat'):
2034 2034 # When following files, track renames via a special matcher.
2035 2035 # If we're forced to take the slowpath it means we're following
2036 2036 # at least one pattern/directory, so don't bother with rename tracking.
2037 2037 if follow and not match.always() and not slowpath:
2038 2038 # _makefollowlogfilematcher expects its files argument to be
2039 2039 # relative to the repo root, so use match.files(), not pats.
2040 2040 filematcher = _makefollowlogfilematcher(repo, match.files(),
2041 2041 followfirst)
2042 2042 else:
2043 2043 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2044 2044 if filematcher is None:
2045 2045 filematcher = lambda rev: match
2046 2046
2047 2047 expr = []
2048 2048 for op, val in sorted(opts.iteritems()):
2049 2049 if not val:
2050 2050 continue
2051 2051 if op not in opt2revset:
2052 2052 continue
2053 2053 revop, andor = opt2revset[op]
2054 2054 if '%(val)' not in revop:
2055 2055 expr.append(revop)
2056 2056 else:
2057 2057 if not isinstance(val, list):
2058 2058 e = revop % {'val': val}
2059 2059 else:
2060 2060 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2061 2061 expr.append(e)
2062 2062
2063 2063 if expr:
2064 2064 expr = '(' + ' and '.join(expr) + ')'
2065 2065 else:
2066 2066 expr = None
2067 2067 return expr, filematcher
2068 2068
2069 2069 def _logrevs(repo, opts):
2070 2070 # Default --rev value depends on --follow but --follow behavior
2071 2071 # depends on revisions resolved from --rev...
2072 2072 follow = opts.get('follow') or opts.get('follow_first')
2073 2073 if opts.get('rev'):
2074 2074 revs = scmutil.revrange(repo, opts['rev'])
2075 2075 elif follow and repo.dirstate.p1() == nullid:
2076 2076 revs = revset.baseset()
2077 2077 elif follow:
2078 2078 revs = repo.revs('reverse(:.)')
2079 2079 else:
2080 2080 revs = revset.spanset(repo)
2081 2081 revs.reverse()
2082 2082 return revs
2083 2083
2084 2084 def getgraphlogrevs(repo, pats, opts):
2085 2085 """Return (revs, expr, filematcher) where revs is an iterable of
2086 2086 revision numbers, expr is a revset string built from log options
2087 2087 and file patterns or None, and used to filter 'revs'. If --stat or
2088 2088 --patch are not passed filematcher is None. Otherwise it is a
2089 2089 callable taking a revision number and returning a match object
2090 2090 filtering the files to be detailed when displaying the revision.
2091 2091 """
2092 2092 limit = loglimit(opts)
2093 2093 revs = _logrevs(repo, opts)
2094 2094 if not revs:
2095 2095 return revset.baseset(), None, None
2096 2096 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2097 2097 if opts.get('rev'):
2098 2098 # User-specified revs might be unsorted, but don't sort before
2099 2099 # _makelogrevset because it might depend on the order of revs
2100 2100 revs.sort(reverse=True)
2101 2101 if expr:
2102 2102 # Revset matchers often operate faster on revisions in changelog
2103 2103 # order, because most filters deal with the changelog.
2104 2104 revs.reverse()
2105 2105 matcher = revset.match(repo.ui, expr)
2106 2106 # Revset matches can reorder revisions. "A or B" typically returns
2107 2107 # the revision matching A then the revision matching B. Sort
2108 2108 # again to fix that.
2109 2109 revs = matcher(repo, revs)
2110 2110 revs.sort(reverse=True)
2111 2111 if limit is not None:
2112 2112 limitedrevs = []
2113 2113 for idx, rev in enumerate(revs):
2114 2114 if idx >= limit:
2115 2115 break
2116 2116 limitedrevs.append(rev)
2117 2117 revs = revset.baseset(limitedrevs)
2118 2118
2119 2119 return revs, expr, filematcher
2120 2120
2121 2121 def getlogrevs(repo, pats, opts):
2122 2122 """Return (revs, expr, filematcher) where revs is an iterable of
2123 2123 revision numbers, expr is a revset string built from log options
2124 2124 and file patterns or None, and used to filter 'revs'. If --stat or
2125 2125 --patch are not passed filematcher is None. Otherwise it is a
2126 2126 callable taking a revision number and returning a match object
2127 2127 filtering the files to be detailed when displaying the revision.
2128 2128 """
2129 2129 limit = loglimit(opts)
2130 2130 revs = _logrevs(repo, opts)
2131 2131 if not revs:
2132 2132 return revset.baseset([]), None, None
2133 2133 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2134 2134 if expr:
2135 2135 # Revset matchers often operate faster on revisions in changelog
2136 2136 # order, because most filters deal with the changelog.
2137 2137 if not opts.get('rev'):
2138 2138 revs.reverse()
2139 2139 matcher = revset.match(repo.ui, expr)
2140 2140 # Revset matches can reorder revisions. "A or B" typically returns
2141 2141 # the revision matching A then the revision matching B. Sort
2142 2142 # again to fix that.
2143 2143 revs = matcher(repo, revs)
2144 2144 if not opts.get('rev'):
2145 2145 revs.sort(reverse=True)
2146 2146 if limit is not None:
2147 2147 limitedrevs = []
2148 2148 for idx, r in enumerate(revs):
2149 2149 if limit <= idx:
2150 2150 break
2151 2151 limitedrevs.append(r)
2152 2152 revs = revset.baseset(limitedrevs)
2153 2153
2154 2154 return revs, expr, filematcher
2155 2155
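# Illustrative call (assumed opts dict, sketch only): with no --rev and a
# single --user filter, getlogrevs() builds a user() revset, filters the
# full repo span with it, and returns the matches newest-first with no
# limit applied.
#
#   revs, expr, filematcher = getlogrevs(repo, [], {'user': ['alice']})
#   # expr is a revset string based on user('alice'); filematcher is None
#   # because neither 'patch' nor 'stat' was requested.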
2156 2156 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
2157 2157 filematcher=None):
2158 2158 seen, state = [], graphmod.asciistate()
2159 2159 for rev, type, ctx, parents in dag:
2160 2160 char = 'o'
2161 2161 if ctx.node() in showparents:
2162 2162 char = '@'
2163 2163 elif ctx.obsolete():
2164 2164 char = 'x'
2165 2165 elif ctx.closesbranch():
2166 2166 char = '_'
2167 2167 copies = None
2168 2168 if getrenamed and ctx.rev():
2169 2169 copies = []
2170 2170 for fn in ctx.files():
2171 2171 rename = getrenamed(fn, ctx.rev())
2172 2172 if rename:
2173 2173 copies.append((fn, rename[0]))
2174 2174 revmatchfn = None
2175 2175 if filematcher is not None:
2176 2176 revmatchfn = filematcher(ctx.rev())
2177 2177 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2178 2178 lines = displayer.hunk.pop(rev).split('\n')
2179 2179 if not lines[-1]:
2180 2180 del lines[-1]
2181 2181 displayer.flush(ctx)
2182 2182 edges = edgefn(type, char, lines, seen, rev, parents)
2183 2183 for type, char, lines, coldata in edges:
2184 2184 graphmod.ascii(ui, state, type, char, lines, coldata)
2185 2185 displayer.close()
2186 2186
2187 2187 def graphlog(ui, repo, *pats, **opts):
2188 2188 # Parameters are identical to log command ones
2189 2189 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2190 2190 revdag = graphmod.dagwalker(repo, revs)
2191 2191
2192 2192 getrenamed = None
2193 2193 if opts.get('copies'):
2194 2194 endrev = None
2195 2195 if opts.get('rev'):
2196 2196 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2197 2197 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2198 2198 displayer = show_changeset(ui, repo, opts, buffered=True)
2199 2199 showparents = [ctx.node() for ctx in repo[None].parents()]
2200 2200 displaygraph(ui, revdag, displayer, showparents,
2201 2201 graphmod.asciiedges, getrenamed, filematcher)
2202 2202
2203 2203 def checkunsupportedgraphflags(pats, opts):
2204 2204 for op in ["newest_first"]:
2205 2205 if op in opts and opts[op]:
2206 2206 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2207 2207 % op.replace("_", "-"))
2208 2208
2209 2209 def graphrevs(repo, nodes, opts):
2210 2210 limit = loglimit(opts)
2211 2211 nodes.reverse()
2212 2212 if limit is not None:
2213 2213 nodes = nodes[:limit]
2214 2214 return graphmod.nodes(repo, nodes)
2215 2215
2216 2216 def add(ui, repo, match, prefix, explicitonly, **opts):
2217 2217 join = lambda f: os.path.join(prefix, f)
2218 2218 bad = []
2219 2219
2220 2220 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2221 2221 names = []
2222 2222 wctx = repo[None]
2223 2223 cca = None
2224 2224 abort, warn = scmutil.checkportabilityalert(ui)
2225 2225 if abort or warn:
2226 2226 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2227 2227
2228 2228 badmatch = matchmod.badmatch(match, badfn)
2229 2229 dirstate = repo.dirstate
2230 2230 # We don't want to just call wctx.walk here, since it would return a lot of
2231 2231 # clean files, which we aren't interested in and which takes time.
2232 2232 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2233 2233 True, False, full=False)):
2234 2234 exact = match.exact(f)
2235 2235 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2236 2236 if cca:
2237 2237 cca(f)
2238 2238 names.append(f)
2239 2239 if ui.verbose or not exact:
2240 2240 ui.status(_('adding %s\n') % match.rel(f))
2241 2241
2242 2242 for subpath in sorted(wctx.substate):
2243 2243 sub = wctx.sub(subpath)
2244 2244 try:
2245 2245 submatch = matchmod.narrowmatcher(subpath, match)
2246 2246 if opts.get('subrepos'):
2247 2247 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2248 2248 else:
2249 2249 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2250 2250 except error.LookupError:
2251 2251 ui.status(_("skipping missing subrepository: %s\n")
2252 2252 % join(subpath))
2253 2253
2254 2254 if not opts.get('dry_run'):
2255 2255 rejected = wctx.add(names, prefix)
2256 2256 bad.extend(f for f in rejected if f in match.files())
2257 2257 return bad
2258 2258
2259 2259 def forget(ui, repo, match, prefix, explicitonly):
2260 2260 join = lambda f: os.path.join(prefix, f)
2261 2261 bad = []
2262 2262 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2263 2263 wctx = repo[None]
2264 2264 forgot = []
2265 2265
2266 2266 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2267 2267 forget = sorted(s[0] + s[1] + s[3] + s[6])
2268 2268 if explicitonly:
2269 2269 forget = [f for f in forget if match.exact(f)]
2270 2270
2271 2271 for subpath in sorted(wctx.substate):
2272 2272 sub = wctx.sub(subpath)
2273 2273 try:
2274 2274 submatch = matchmod.narrowmatcher(subpath, match)
2275 2275 subbad, subforgot = sub.forget(submatch, prefix)
2276 2276 bad.extend([subpath + '/' + f for f in subbad])
2277 2277 forgot.extend([subpath + '/' + f for f in subforgot])
2278 2278 except error.LookupError:
2279 2279 ui.status(_("skipping missing subrepository: %s\n")
2280 2280 % join(subpath))
2281 2281
2282 2282 if not explicitonly:
2283 2283 for f in match.files():
2284 2284 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2285 2285 if f not in forgot:
2286 2286 if repo.wvfs.exists(f):
2287 2287 # Don't complain if the exact case match wasn't given.
2288 2288 # But don't do this until after checking 'forgot', so
2289 2289 # that subrepo files aren't normalized, and this op is
2290 2290 # purely from data cached by the status walk above.
2291 2291 if repo.dirstate.normalize(f) in repo.dirstate:
2292 2292 continue
2293 2293 ui.warn(_('not removing %s: '
2294 2294 'file is already untracked\n')
2295 2295 % match.rel(f))
2296 2296 bad.append(f)
2297 2297
2298 2298 for f in forget:
2299 2299 if ui.verbose or not match.exact(f):
2300 2300 ui.status(_('removing %s\n') % match.rel(f))
2301 2301
2302 2302 rejected = wctx.forget(forget, prefix)
2303 2303 bad.extend(f for f in rejected if f in match.files())
2304 2304 forgot.extend(f for f in forget if f not in rejected)
2305 2305 return bad, forgot
2306 2306
2307 2307 def files(ui, ctx, m, fm, fmt, subrepos):
2308 2308 rev = ctx.rev()
2309 2309 ret = 1
2310 2310 ds = ctx.repo().dirstate
2311 2311
2312 2312 for f in ctx.matches(m):
2313 2313 if rev is None and ds[f] == 'r':
2314 2314 continue
2315 2315 fm.startitem()
2316 2316 if ui.verbose:
2317 2317 fc = ctx[f]
2318 2318 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2319 2319 fm.data(abspath=f)
2320 2320 fm.write('path', fmt, m.rel(f))
2321 2321 ret = 0
2322 2322
2323 2323 for subpath in sorted(ctx.substate):
2324 2324 def matchessubrepo(subpath):
2325 2325 return (m.always() or m.exact(subpath)
2326 2326 or any(f.startswith(subpath + '/') for f in m.files()))
2327 2327
2328 2328 if subrepos or matchessubrepo(subpath):
2329 2329 sub = ctx.sub(subpath)
2330 2330 try:
2331 2331 submatch = matchmod.narrowmatcher(subpath, m)
2332 2332 if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
2333 2333 ret = 0
2334 2334 except error.LookupError:
2335 2335 ui.status(_("skipping missing subrepository: %s\n")
2336 2336 % m.abs(subpath))
2337 2337
2338 2338 return ret
2339 2339
2340 2340 def remove(ui, repo, m, prefix, after, force, subrepos):
2341 2341 join = lambda f: os.path.join(prefix, f)
2342 2342 ret = 0
2343 2343 s = repo.status(match=m, clean=True)
2344 2344 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2345 2345
2346 2346 wctx = repo[None]
2347 2347
2348 2348 for subpath in sorted(wctx.substate):
2349 2349 def matchessubrepo(matcher, subpath):
2350 2350 if matcher.exact(subpath):
2351 2351 return True
2352 2352 for f in matcher.files():
2353 2353 if f.startswith(subpath):
2354 2354 return True
2355 2355 return False
2356 2356
2357 2357 if subrepos or matchessubrepo(m, subpath):
2358 2358 sub = wctx.sub(subpath)
2359 2359 try:
2360 2360 submatch = matchmod.narrowmatcher(subpath, m)
2361 2361 if sub.removefiles(submatch, prefix, after, force, subrepos):
2362 2362 ret = 1
2363 2363 except error.LookupError:
2364 2364 ui.status(_("skipping missing subrepository: %s\n")
2365 2365 % join(subpath))
2366 2366
2367 2367 # warn about failure to delete explicit files/dirs
2368 2368 deleteddirs = util.dirs(deleted)
2369 2369 for f in m.files():
2370 2370 def insubrepo():
2371 2371 for subpath in wctx.substate:
2372 2372 if f.startswith(subpath):
2373 2373 return True
2374 2374 return False
2375 2375
2376 2376 isdir = f in deleteddirs or wctx.hasdir(f)
2377 2377 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2378 2378 continue
2379 2379
2380 2380 if repo.wvfs.exists(f):
2381 2381 if repo.wvfs.isdir(f):
2382 2382 ui.warn(_('not removing %s: no tracked files\n')
2383 2383 % m.rel(f))
2384 2384 else:
2385 2385 ui.warn(_('not removing %s: file is untracked\n')
2386 2386 % m.rel(f))
2387 2387 # missing files will generate a warning elsewhere
2388 2388 ret = 1
2389 2389
2390 2390 if force:
2391 2391 list = modified + deleted + clean + added
2392 2392 elif after:
2393 2393 list = deleted
2394 2394 for f in modified + added + clean:
2395 2395 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2396 2396 ret = 1
2397 2397 else:
2398 2398 list = deleted + clean
2399 2399 for f in modified:
2400 2400 ui.warn(_('not removing %s: file is modified (use -f'
2401 2401 ' to force removal)\n') % m.rel(f))
2402 2402 ret = 1
2403 2403 for f in added:
2404 2404 ui.warn(_('not removing %s: file has been marked for add'
2405 2405 ' (use forget to undo)\n') % m.rel(f))
2406 2406 ret = 1
2407 2407
2408 2408 for f in sorted(list):
2409 2409 if ui.verbose or not m.exact(f):
2410 2410 ui.status(_('removing %s\n') % m.rel(f))
2411 2411
2412 2412 wlock = repo.wlock()
2413 2413 try:
2414 2414 if not after:
2415 2415 for f in list:
2416 2416 if f in added:
2417 2417 continue # we never unlink added files on remove
2418 2418 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2419 2419 repo[None].forget(list)
2420 2420 finally:
2421 2421 wlock.release()
2422 2422
2423 2423 return ret
2424 2424
2425 2425 def cat(ui, repo, ctx, matcher, prefix, **opts):
2426 2426 err = 1
2427 2427
2428 2428 def write(path):
2429 2429 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2430 2430 pathname=os.path.join(prefix, path))
2431 2431 data = ctx[path].data()
2432 2432 if opts.get('decode'):
2433 2433 data = repo.wwritedata(path, data)
2434 2434 fp.write(data)
2435 2435 fp.close()
2436 2436
2437 2437 # Automation often uses hg cat on single files, so special case it
2438 2438 # for performance to avoid the cost of parsing the manifest.
2439 2439 if len(matcher.files()) == 1 and not matcher.anypats():
2440 2440 file = matcher.files()[0]
2441 2441 mf = repo.manifest
2442 2442 mfnode = ctx.manifestnode()
2443 2443 if mfnode and mf.find(mfnode, file)[0]:
2444 2444 write(file)
2445 2445 return 0
2446 2446
2447 2447 # Don't warn about "missing" files that are really in subrepos
2448 2448 def badfn(path, msg):
2449 2449 for subpath in ctx.substate:
2450 2450 if path.startswith(subpath):
2451 2451 return
2452 2452 matcher.bad(path, msg)
2453 2453
2454 2454 for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
2455 2455 write(abs)
2456 2456 err = 0
2457 2457
2458 2458 for subpath in sorted(ctx.substate):
2459 2459 sub = ctx.sub(subpath)
2460 2460 try:
2461 2461 submatch = matchmod.narrowmatcher(subpath, matcher)
2462 2462
2463 2463 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2464 2464 **opts):
2465 2465 err = 0
2466 2466 except error.RepoLookupError:
2467 2467 ui.status(_("skipping missing subrepository: %s\n")
2468 2468 % os.path.join(prefix, subpath))
2469 2469
2470 2470 return err
2471 2471
2472 2472 def commit(ui, repo, commitfunc, pats, opts):
2473 2473 '''commit the specified files or all outstanding changes'''
2474 2474 date = opts.get('date')
2475 2475 if date:
2476 2476 opts['date'] = util.parsedate(date)
2477 2477 message = logmessage(ui, opts)
2478 2478 matcher = scmutil.match(repo[None], pats, opts)
2479 2479
2480 2480 # extract addremove carefully -- this function can be called from a command
2481 2481 # that doesn't support addremove
2482 2482 if opts.get('addremove'):
2483 2483 if scmutil.addremove(repo, matcher, "", opts) != 0:
2484 2484 raise error.Abort(
2485 2485 _("failed to mark all new/missing files as added/removed"))
2486 2486
2487 2487 return commitfunc(ui, repo, message, matcher, opts)
2488 2488
2489 2489 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2490 2490 # avoid cycle context -> subrepo -> cmdutil
2491 2491 import context
2492 2492
2493 2493 # amend will reuse the existing user if not specified, but the obsolete
2494 2494 # marker creation requires that the current user's name is specified.
2495 2495 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2496 2496 ui.username() # raise exception if username not set
2497 2497
2498 2498 ui.note(_('amending changeset %s\n') % old)
2499 2499 base = old.p1()
2500 2500 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2501 2501
2502 2502 wlock = lock = newid = None
2503 2503 try:
2504 2504 wlock = repo.wlock()
2505 2505 lock = repo.lock()
2506 2506 tr = repo.transaction('amend')
2507 2507 try:
2508 2508 # See if we got a message from -m or -l, if not, open the editor
2509 2509 # with the message of the changeset to amend
2510 2510 message = logmessage(ui, opts)
2511 2511 # ensure logfile does not conflict with later enforcement of the
2512 2512 # message. potential logfile content has been processed by
2513 2513 # `logmessage` anyway.
2514 2514 opts.pop('logfile')
2515 2515 # First, do a regular commit to record all changes in the working
2516 2516 # directory (if there are any)
2517 2517 ui.callhooks = False
2518 2518 activebookmark = repo._activebookmark
2519 2519 try:
2520 2520 repo._activebookmark = None
2521 2521 opts['message'] = 'temporary amend commit for %s' % old
2522 2522 node = commit(ui, repo, commitfunc, pats, opts)
2523 2523 finally:
2524 2524 repo._activebookmark = activebookmark
2525 2525 ui.callhooks = True
2526 2526 ctx = repo[node]
2527 2527
2528 2528 # Participating changesets:
2529 2529 #
2530 2530 # node/ctx o - new (intermediate) commit that contains changes
2531 2531 # | from working dir to go into amending commit
2532 2532 # | (or a workingctx if there were no changes)
2533 2533 # |
2534 2534 # old o - changeset to amend
2535 2535 # |
2536 2536 # base o - parent of amending changeset
2537 2537
2538 2538 # Update extra dict from amended commit (e.g. to preserve graft
2539 2539 # source)
2540 2540 extra.update(old.extra())
2541 2541
2542 2542 # Also update it from the intermediate commit or from the wctx
2543 2543 extra.update(ctx.extra())
2544 2544
2545 2545 if len(old.parents()) > 1:
2546 2546 # ctx.files() isn't reliable for merges, so fall back to the
2547 2547 # slower repo.status() method
2548 2548 files = set([fn for st in repo.status(base, old)[:3]
2549 2549 for fn in st])
2550 2550 else:
2551 2551 files = set(old.files())
2552 2552
2553 2553 # Second, we use either the commit we just did, or, if there were no
2554 2554 # changes, the parent of the working directory as the version of the
2555 2555 # files in the final amend commit
2556 2556 if node:
2557 2557 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2558 2558
2559 2559 user = ctx.user()
2560 2560 date = ctx.date()
2561 2561 # Recompute copies (avoid recording a -> b -> a)
2562 2562 copied = copies.pathcopies(base, ctx)
2563 2563 if old.p2:
2564 2564 copied.update(copies.pathcopies(old.p2(), ctx))
2565 2565
2566 2566 # Prune files which were reverted by the updates: if old
2567 2567 # introduced file X and our intermediate commit, node,
2568 2568 # renamed that file, then those two files are the same and
2569 2569 # we can discard X from our list of files. Likewise if X
2570 2570 # was deleted, it's no longer relevant
2571 2571 files.update(ctx.files())
2572 2572
2573 2573 def samefile(f):
2574 2574 if f in ctx.manifest():
2575 2575 a = ctx.filectx(f)
2576 2576 if f in base.manifest():
2577 2577 b = base.filectx(f)
2578 2578 return (not a.cmp(b)
2579 2579 and a.flags() == b.flags())
2580 2580 else:
2581 2581 return False
2582 2582 else:
2583 2583 return f not in base.manifest()
2584 2584 files = [f for f in files if not samefile(f)]
2585 2585
2586 2586 def filectxfn(repo, ctx_, path):
2587 2587 try:
2588 2588 fctx = ctx[path]
2589 2589 flags = fctx.flags()
2590 2590 mctx = context.memfilectx(repo,
2591 2591 fctx.path(), fctx.data(),
2592 2592 islink='l' in flags,
2593 2593 isexec='x' in flags,
2594 2594 copied=copied.get(path))
2595 2595 return mctx
2596 2596 except KeyError:
2597 2597 return None
2598 2598 else:
2599 2599 ui.note(_('copying changeset %s to %s\n') % (old, base))
2600 2600
2601 2601 # Use version of files as in the old cset
2602 2602 def filectxfn(repo, ctx_, path):
2603 2603 try:
2604 2604 return old.filectx(path)
2605 2605 except KeyError:
2606 2606 return None
2607 2607
2608 2608 user = opts.get('user') or old.user()
2609 2609 date = opts.get('date') or old.date()
2610 2610 editform = mergeeditform(old, 'commit.amend')
2611 2611 editor = getcommiteditor(editform=editform, **opts)
2612 2612 if not message:
2613 2613 editor = getcommiteditor(edit=True, editform=editform)
2614 2614 message = old.description()
2615 2615
2616 2616 pureextra = extra.copy()
2617 2617 extra['amend_source'] = old.hex()
2618 2618
2619 2619 new = context.memctx(repo,
2620 2620 parents=[base.node(), old.p2().node()],
2621 2621 text=message,
2622 2622 files=files,
2623 2623 filectxfn=filectxfn,
2624 2624 user=user,
2625 2625 date=date,
2626 2626 extra=extra,
2627 2627 editor=editor)
2628 2628
2629 2629 newdesc = changelog.stripdesc(new.description())
2630 2630 if ((not node)
2631 2631 and newdesc == old.description()
2632 2632 and user == old.user()
2633 2633 and date == old.date()
2634 2634 and pureextra == old.extra()):
2635 2635 # nothing changed. continuing here would create a new node
2636 2636 # anyway because of the amend_source noise.
2637 2637 #
2638 2638 # This is not what we expect from amend.
2639 2639 return old.node()
2640 2640
2641 2641 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2642 2642 try:
2643 2643 if opts.get('secret'):
2644 2644 commitphase = 'secret'
2645 2645 else:
2646 2646 commitphase = old.phase()
2647 2647 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2648 2648 newid = repo.commitctx(new)
2649 2649 finally:
2650 2650 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2651 2651 if newid != old.node():
2652 2652 # Reroute the working copy parent to the new changeset
2653 2653 repo.setparents(newid, nullid)
2654 2654
2655 2655 # Move bookmarks from old parent to amend commit
2656 2656 bms = repo.nodebookmarks(old.node())
2657 2657 if bms:
2658 2658 marks = repo._bookmarks
2659 2659 for bm in bms:
2660 2660 ui.debug('moving bookmarks %r from %s to %s\n' %
2661 2661 (marks, old.hex(), hex(newid)))
2662 2662 marks[bm] = newid
2663 2663 marks.recordchange(tr)
2664 2664 # commit the whole amend process
2665 2665 if createmarkers:
2666 2666 # mark the new changeset as successor of the rewritten one
2667 2667 new = repo[newid]
2668 2668 obs = [(old, (new,))]
2669 2669 if node:
2670 2670 obs.append((ctx, ()))
2671 2671
2672 2672 obsolete.createmarkers(repo, obs)
2673 2673 tr.close()
2674 2674 finally:
2675 2675 tr.release()
2676 2676 if not createmarkers and newid != old.node():
2677 2677 # Strip the intermediate commit (if there was one) and the amended
2678 2678 # commit
2679 2679 if node:
2680 2680 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2681 2681 ui.note(_('stripping amended changeset %s\n') % old)
2682 2682 repair.strip(ui, repo, old.node(), topic='amend-backup')
2683 2683 finally:
2684 2684 lockmod.release(lock, wlock)
2685 2685 return newid
2686 2686
2687 2687 def commiteditor(repo, ctx, subs, editform=''):
2688 2688 if ctx.description():
2689 2689 return ctx.description()
2690 2690 return commitforceeditor(repo, ctx, subs, editform=editform,
2691 2691 unchangedmessagedetection=True)
2692 2692
2693 2693 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2694 2694 editform='', unchangedmessagedetection=False):
2695 2695 if not extramsg:
2696 2696 extramsg = _("Leave message empty to abort commit.")
2697 2697
2698 2698 forms = [e for e in editform.split('.') if e]
2699 2699 forms.insert(0, 'changeset')
2700 2700 templatetext = None
2701 2701 while forms:
2702 2702 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2703 2703 if tmpl:
2704 2704 templatetext = committext = buildcommittemplate(
2705 2705 repo, ctx, subs, extramsg, tmpl)
2706 2706 break
2707 2707 forms.pop()
2708 2708 else:
2709 2709 committext = buildcommittext(repo, ctx, subs, extramsg)
2710 2710
2711 2711 # run editor in the repository root
2712 2712 olddir = os.getcwd()
2713 2713 os.chdir(repo.root)
2714 2714
2715 2715 # make in-memory changes visible to external process
2716 2716 tr = repo.currenttransaction()
2717 2717 repo.dirstate.write(tr)
2718 2718 pending = tr and tr.writepending() and repo.root
2719 2719
2720 2720 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2721 2721 editform=editform, pending=pending)
2722 2722 text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
2723 2723 os.chdir(olddir)
2724 2724
2725 2725 if finishdesc:
2726 2726 text = finishdesc(text)
2727 2727 if not text.strip():
2728 2728 raise error.Abort(_("empty commit message"))
2729 2729 if unchangedmessagedetection and editortext == templatetext:
2730 2730 raise error.Abort(_("commit message unchanged"))
2731 2731
2732 2732 return text
2733 2733
2734 2734 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2735 2735 ui = repo.ui
2736 2736 tmpl, mapfile = gettemplate(ui, tmpl, None)
2737 2737
2738 2738 try:
2739 2739 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2740 2740 except SyntaxError as inst:
2741 2741 raise error.Abort(inst.args[0])
2742 2742
2743 2743 for k, v in repo.ui.configitems('committemplate'):
2744 2744 if k != 'changeset':
2745 2745 t.t.cache[k] = v
2746 2746
2747 2747 if not extramsg:
2748 2748 extramsg = '' # ensure that extramsg is string
2749 2749
2750 2750 ui.pushbuffer()
2751 2751 t.show(ctx, extramsg=extramsg)
2752 2752 return ui.popbuffer()
2753 2753
2754 2754 def hgprefix(msg):
2755 2755 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2756 2756
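# Informal example: hgprefix() prefixes every non-empty line with "HG: ",
# which is what makes the editor comments below strippable by the
# re.sub("(?m)^HG:.*(\n|$)", ...) call in commitforceeditor().
#
#   hgprefix('user: alice\nbranch merge')
#   # -> 'HG: user: alice\nHG: branch merge'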
2757 2757 def buildcommittext(repo, ctx, subs, extramsg):
2758 2758 edittext = []
2759 2759 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2760 2760 if ctx.description():
2761 2761 edittext.append(ctx.description())
2762 2762 edittext.append("")
2763 2763 edittext.append("") # Empty line between message and comments.
2764 2764 edittext.append(hgprefix(_("Enter commit message."
2765 2765 " Lines beginning with 'HG:' are removed.")))
2766 2766 edittext.append(hgprefix(extramsg))
2767 2767 edittext.append("HG: --")
2768 2768 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2769 2769 if ctx.p2():
2770 2770 edittext.append(hgprefix(_("branch merge")))
2771 2771 if ctx.branch():
2772 2772 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2773 2773 if bookmarks.isactivewdirparent(repo):
2774 2774 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2775 2775 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2776 2776 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2777 2777 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2778 2778 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2779 2779 if not added and not modified and not removed:
2780 2780 edittext.append(hgprefix(_("no files changed")))
2781 2781 edittext.append("")
2782 2782
2783 2783 return "\n".join(edittext)
2784 2784
2785 2785 def commitstatus(repo, node, branch, bheads=None, opts=None):
2786 2786 if opts is None:
2787 2787 opts = {}
2788 2788 ctx = repo[node]
2789 2789 parents = ctx.parents()
2790 2790
2791 2791 if (not opts.get('amend') and bheads and node not in bheads and not
2792 2792 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2793 2793 repo.ui.status(_('created new head\n'))
2794 2794 # The message is not printed for initial roots. For the other
2795 2795 # changesets, it is printed in the following situations:
2796 2796 #
2797 2797 # Par column: for the 2 parents with ...
2798 2798 # N: null or no parent
2799 2799 # B: parent is on another named branch
2800 2800 # C: parent is a regular non head changeset
2801 2801 # H: parent was a branch head of the current branch
2802 2802 # Msg column: whether we print "created new head" message
2803 2803 # In the following, it is assumed that there already exists some
2804 2804 # initial branch heads of the current branch, otherwise nothing is
2805 2805 # printed anyway.
2806 2806 #
2807 2807 # Par Msg Comment
2808 2808 # N N y additional topo root
2809 2809 #
2810 2810 # B N y additional branch root
2811 2811 # C N y additional topo head
2812 2812 # H N n usual case
2813 2813 #
2814 2814 # B B y weird additional branch root
2815 2815 # C B y branch merge
2816 2816 # H B n merge with named branch
2817 2817 #
2818 2818 # C C y additional head from merge
2819 2819 # C H n merge with a head
2820 2820 #
2821 2821 # H H n head merge: head count decreases
2822 2822
2823 2823 if not opts.get('close_branch'):
2824 2824 for r in parents:
2825 2825 if r.closesbranch() and r.branch() == branch:
2826 2826 repo.ui.status(_('reopening closed branch head %d\n') % r)
2827 2827
2828 2828 if repo.ui.debugflag:
2829 2829 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2830 2830 elif repo.ui.verbose:
2831 2831 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2832 2832
2833 2833 def revert(ui, repo, ctx, parents, *pats, **opts):
2834 2834 parent, p2 = parents
2835 2835 node = ctx.node()
2836 2836
2837 2837 mf = ctx.manifest()
2838 2838 if node == p2:
2839 2839 parent = p2
2840 2840 if node == parent:
2841 2841 pmf = mf
2842 2842 else:
2843 2843 pmf = None
2844 2844
2845 2845 # need all matching names in dirstate and manifest of target rev,
2846 2846 # so have to walk both. do not print errors if files exist in one
2847 2847 # but not other. in both cases, filesets should be evaluated against
2848 2848 # workingctx to get consistent result (issue4497). this means 'set:**'
2849 2849 # cannot be used to select missing files from target rev.
2850 2850
2851 2851 # `names` is a mapping for all elements in working copy and target revision
2852 2852 # The mapping is in the form:
2853 2853 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2854 2854 names = {}
2855 2855
2856 2856 wlock = repo.wlock()
2857 2857 try:
2858 2858 ## filling of the `names` mapping
2859 2859 # walk dirstate to fill `names`
2860 2860
2861 2861 interactive = opts.get('interactive', False)
2862 2862 wctx = repo[None]
2863 2863 m = scmutil.match(wctx, pats, opts)
2864 2864
2865 2865 # we'll need this later
2866 2866 targetsubs = sorted(s for s in wctx.substate if m(s))
2867 2867
2868 2868 if not m.always():
2869 2869 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2870 2870 names[abs] = m.rel(abs), m.exact(abs)
2871 2871
2872 2872 # walk target manifest to fill `names`
2873 2873
2874 2874 def badfn(path, msg):
2875 2875 if path in names:
2876 2876 return
2877 2877 if path in ctx.substate:
2878 2878 return
2879 2879 path_ = path + '/'
2880 2880 for f in names:
2881 2881 if f.startswith(path_):
2882 2882 return
2883 2883 ui.warn("%s: %s\n" % (m.rel(path), msg))
2884 2884
2885 2885 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2886 2886 if abs not in names:
2887 2887 names[abs] = m.rel(abs), m.exact(abs)
2888 2888
2889 2889 # Find the status of all files in `names`.
2890 2890 m = scmutil.matchfiles(repo, names)
2891 2891
2892 2892 changes = repo.status(node1=node, match=m,
2893 2893 unknown=True, ignored=True, clean=True)
2894 2894 else:
2895 2895 changes = repo.status(node1=node, match=m)
2896 2896 for kind in changes:
2897 2897 for abs in kind:
2898 2898 names[abs] = m.rel(abs), m.exact(abs)
2899 2899
2900 2900 m = scmutil.matchfiles(repo, names)
2901 2901
2902 2902 modified = set(changes.modified)
2903 2903 added = set(changes.added)
2904 2904 removed = set(changes.removed)
2905 2905 _deleted = set(changes.deleted)
2906 2906 unknown = set(changes.unknown)
2907 2907 unknown.update(changes.ignored)
2908 2908 clean = set(changes.clean)
2909 2909 modadded = set()
2910 2910
2911 2911 # split between files known in target manifest and the others
2912 2912 smf = set(mf)
2913 2913
2914 2914 # determine the exact nature of the deleted files
2915 2915 deladded = _deleted - smf
2916 2916 deleted = _deleted - deladded
2917 2917
2918 2918 # We need to account for the state of the file in the dirstate,
2919 2919 # even when we revert against something other than the parent. This will
2920 2920 # slightly alter the behavior of revert (doing a backup or not, delete
2921 2921 # or just forget etc).
2922 2922 if parent == node:
2923 2923 dsmodified = modified
2924 2924 dsadded = added
2925 2925 dsremoved = removed
2926 2926 # store all local modifications, useful later for rename detection
2927 2927 localchanges = dsmodified | dsadded
2928 2928 modified, added, removed = set(), set(), set()
2929 2929 else:
2930 2930 changes = repo.status(node1=parent, match=m)
2931 2931 dsmodified = set(changes.modified)
2932 2932 dsadded = set(changes.added)
2933 2933 dsremoved = set(changes.removed)
2934 2934 # store all local modifications, useful later for rename detection
2935 2935 localchanges = dsmodified | dsadded
2936 2936
2937 2937 # only take removes between wc and target into account
2938 2938 clean |= dsremoved - removed
2939 2939 dsremoved &= removed
2940 2940 # distinguish between dirstate removes and the others
2941 2941 removed -= dsremoved
2942 2942
2943 2943 modadded = added & dsmodified
2944 2944 added -= modadded
2945 2945
2946 2946 # tell newly modified files apart.
2947 2947 dsmodified &= modified
2948 2948 dsmodified |= modified & dsadded # dirstate-added files may need backup
2949 2949 modified -= dsmodified
2950 2950
2951 2951 # We need to wait for some post-processing to update this set
2952 2952 # before making the distinction. The dirstate will be used for
2953 2953 # that purpose.
2954 2954 dsadded = added
2955 2955
2956 2956 # in case of merge, files that are actually added can be reported as
2957 2957 # modified, so we need to post-process the result
2958 2958 if p2 != nullid:
2959 2959 if pmf is None:
2960 2960 # only need parent manifest in the merge case,
2961 2961 # so do not read by default
2962 2962 pmf = repo[parent].manifest()
2963 2963 mergeadd = dsmodified - set(pmf)
2964 2964 dsadded |= mergeadd
2965 2965 dsmodified -= mergeadd
2966 2966
2967 2967 # if f is a rename, update `names` to also revert the source
2968 2968 cwd = repo.getcwd()
2969 2969 for f in localchanges:
2970 2970 src = repo.dirstate.copied(f)
2971 2971 # XXX should we check for rename down to target node?
2972 2972 if src and src not in names and repo.dirstate[src] == 'r':
2973 2973 dsremoved.add(src)
2974 2974 names[src] = (repo.pathto(src, cwd), True)
2975 2975
2976 2976 # distinguish between files to forget and the others
2977 2977 added = set()
2978 2978 for abs in dsadded:
2979 2979 if repo.dirstate[abs] != 'a':
2980 2980 added.add(abs)
2981 2981 dsadded -= added
2982 2982
2983 2983 for abs in deladded:
2984 2984 if repo.dirstate[abs] == 'a':
2985 2985 dsadded.add(abs)
2986 2986 deladded -= dsadded
2987 2987
2988 2988 # For files marked as removed, we check if an unknown file is present at
2989 2989 # the same path. If such a file exists, it may need to be backed up.
2990 2990 # Making the distinction at this stage keeps the backup logic
2991 2991 # simpler.
2992 2992 removunk = set()
2993 2993 for abs in removed:
2994 2994 target = repo.wjoin(abs)
2995 2995 if os.path.lexists(target):
2996 2996 removunk.add(abs)
2997 2997 removed -= removunk
2998 2998
2999 2999 dsremovunk = set()
3000 3000 for abs in dsremoved:
3001 3001 target = repo.wjoin(abs)
3002 3002 if os.path.lexists(target):
3003 3003 dsremovunk.add(abs)
3004 3004 dsremoved -= dsremovunk
3005 3005
3006 3006 # actions to be actually performed by revert: each value is a
3007 3007 # (<list of files>, <message>) tuple
3008 3008 actions = {'revert': ([], _('reverting %s\n')),
3009 3009 'add': ([], _('adding %s\n')),
3010 3010 'remove': ([], _('removing %s\n')),
3011 3011 'drop': ([], _('removing %s\n')),
3012 3012 'forget': ([], _('forgetting %s\n')),
3013 3013 'undelete': ([], _('undeleting %s\n')),
3014 3014 'noop': (None, _('no changes needed to %s\n')),
3015 3015 'unknown': (None, _('file not managed: %s\n')),
3016 3016 }
3017 3017
3018 3018 # "constant" that convey the backup strategy.
3019 3019 # All set to `discard` if `no-backup` is set do avoid checking
3020 3020 # no_backup lower in the code.
3021 3021 # These values are ordered for comparison purposes
3022 3022 backup = 2 # unconditionally do backup
3023 3023 check = 1 # check if the existing file differs from target
3024 3024 discard = 0 # never do backup
3025 3025 if opts.get('no_backup'):
3026 3026 backup = check = discard
3027 3027
3028 3028 backupanddel = actions['remove']
3029 3029 if not opts.get('no_backup'):
3030 3030 backupanddel = actions['drop']
3031 3031
3032 3032 disptable = (
3033 3033 # dispatch table:
3034 3034 # file state
3035 3035 # action
3036 3036 # make backup
3037 3037
3038 3038 ## Sets that will result in changes to files on disk
3039 3039 # Modified compared to target, no local change
3040 3040 (modified, actions['revert'], discard),
3041 3041 # Modified compared to target, but local file is deleted
3042 3042 (deleted, actions['revert'], discard),
3043 3043 # Modified compared to target, local change
3044 3044 (dsmodified, actions['revert'], backup),
3045 3045 # Added since target
3046 3046 (added, actions['remove'], discard),
3047 3047 # Added in working directory
3048 3048 (dsadded, actions['forget'], discard),
3049 3049 # Added since target, have local modification
3050 3050 (modadded, backupanddel, backup),
3051 3051 # Added since target but file is missing in working directory
3052 3052 (deladded, actions['drop'], discard),
3053 3053 # Removed since target, before working copy parent
3054 3054 (removed, actions['add'], discard),
3055 3055 # Same as `removed` but an unknown file exists at the same path
3056 3056 (removunk, actions['add'], check),
3057 3057 # Removed since target, marked as such in working copy parent
3058 3058 (dsremoved, actions['undelete'], discard),
3059 3059 # Same as `dsremoved` but an unknown file exists at the same path
3060 3060 (dsremovunk, actions['undelete'], check),
3061 3061 ## the following sets do not result in any file changes
3062 3062 # File with no modification
3063 3063 (clean, actions['noop'], discard),
3064 3064 # Existing file, not tracked anywhere
3065 3065 (unknown, actions['unknown'], discard),
3066 3066 )
3067 3067
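# Worked example (informal, not in the original source): a file that is only
# modified in the working directory lands in `dsmodified`, so the loop below
# appends it to actions['revert'][0]; its dobackup value is `backup`, so a
# "<file>.orig" copy is saved before reverting unless --no-backup was given
# (which collapses backup/check to discard above).
#
#   names = {'foo.c': ('foo.c', True)}; 'foo.c' in dsmodified
#   # -> actions['revert'][0] == ['foo.c'], backup written to 'foo.c.orig'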
3068 3068 for abs, (rel, exact) in sorted(names.items()):
3069 3069 # target file to be touched on disk (relative to cwd)
3070 3070 target = repo.wjoin(abs)
3071 3071 # search for the entry in the dispatch table.
3072 3072 # if the file is in any of these sets, it was touched in the working
3073 3073 # directory parent and we are sure it needs to be reverted.
3074 3074 for table, (xlist, msg), dobackup in disptable:
3075 3075 if abs not in table:
3076 3076 continue
3077 3077 if xlist is not None:
3078 3078 xlist.append(abs)
3079 3079 if dobackup and (backup <= dobackup
3080 3080 or wctx[abs].cmp(ctx[abs])):
3081 3081 bakname = "%s.orig" % rel
3082 3082 ui.note(_('saving current version of %s as %s\n') %
3083 3083 (rel, bakname))
3084 3084 if not opts.get('dry_run'):
3085 3085 if interactive:
3086 3086 util.copyfile(target, bakname)
3087 3087 else:
3088 3088 util.rename(target, bakname)
3089 3089 if ui.verbose or not exact:
3090 3090 if not isinstance(msg, basestring):
3091 3091 msg = msg(abs)
3092 3092 ui.status(msg % rel)
3093 3093 elif exact:
3094 3094 ui.warn(msg % rel)
3095 3095 break
3096 3096
3097 3097 if not opts.get('dry_run'):
3098 3098 needdata = ('revert', 'add', 'undelete')
3099 3099 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3100 3100 _performrevert(repo, parents, ctx, actions, interactive)
3101 3101
3102 3102 if targetsubs:
3103 3103 # Revert the subrepos on the revert list
3104 3104 for sub in targetsubs:
3105 3105 try:
3106 3106 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3107 3107 except KeyError:
3108 3108 raise error.Abort("subrepository '%s' does not exist in %s!"
3109 3109 % (sub, short(ctx.node())))
3110 3110 finally:
3111 3111 wlock.release()
3112 3112
3113 3113 def _revertprefetch(repo, ctx, *files):
3114 3114 """Let extensions changing the storage layer prefetch content"""
3115 3115 pass
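# A minimal sketch of how a storage extension might use this hook via
# extensions.wrapfunction (the extension module and its `myremotestore`
# helper are hypothetical, not part of this file):
#
#     from mercurial import cmdutil, extensions
#
#     def prefetchwrapper(orig, repo, ctx, *files):
#         # fetch every needed file revision in one batch before revert
#         # starts touching the working directory
#         myremotestore.prefetch(repo, ctx, files)
#         return orig(repo, ctx, *files)
#
#     def extsetup(ui):
#         extensions.wrapfunction(cmdutil, '_revertprefetch', prefetchwrapper)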
3116 3116
3117 3117 def _performrevert(repo, parents, ctx, actions, interactive=False):
3118 3118 """function that actually performs all the actions computed for revert
3119 3119
3120 3120 This is an independent function to let extensions plug in and react to
3121 3121 the imminent revert.
3122 3122
3123 3123 Make sure you have the working directory locked when calling this function.
3124 3124 """
3125 3125 parent, p2 = parents
3126 3126 node = ctx.node()
3127 3127 def checkout(f):
3128 3128 fc = ctx[f]
3129 3129 repo.wwrite(f, fc.data(), fc.flags())
3130 3130
3131 3131 audit_path = pathutil.pathauditor(repo.root)
3132 3132 for f in actions['forget'][0]:
3133 3133 repo.dirstate.drop(f)
3134 3134 for f in actions['remove'][0]:
3135 3135 audit_path(f)
3136 3136 try:
3137 3137 util.unlinkpath(repo.wjoin(f))
3138 3138 except OSError:
3139 3139 pass
3140 3140 repo.dirstate.remove(f)
3141 3141 for f in actions['drop'][0]:
3142 3142 audit_path(f)
3143 3143 repo.dirstate.remove(f)
3144 3144
3145 3145 normal = None
3146 3146 if node == parent:
3147 3147 # We're reverting to our parent. If possible, we'd like status
3148 3148 # to report the file as clean. We have to use normallookup for
3149 3149 # merges to avoid losing information about merged/dirty files.
3150 3150 if p2 != nullid:
3151 3151 normal = repo.dirstate.normallookup
3152 3152 else:
3153 3153 normal = repo.dirstate.normal
3154 3154
3155 3155 newlyaddedandmodifiedfiles = set()
3156 3156 if interactive:
3157 3157 # Prompt the user for changes to revert
3158 3158 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3159 3159 m = scmutil.match(ctx, torevert, {})
3160 3160 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3161 3161 diffopts.nodates = True
3162 3162 diffopts.git = True
3163 3163 reversehunks = repo.ui.configbool('experimental',
3164 3164 'revertalternateinteractivemode',
3165 3165 True)
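        # When enabled (the default), the diff is computed from the target
        # revision to the working directory, so the hunks read as "changes to
        # discard" and the selected ones are reversed below before being
        # applied; otherwise the diff is computed directly in the apply
        # direction.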
3166 3166 if reversehunks:
3167 3167 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3168 3168 else:
3169 3169 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3170 3170 originalchunks = patch.parsepatch(diff)
3171 3171
3172 3172 try:
3173 3173
3174 3174 chunks = recordfilter(repo.ui, originalchunks)
3175 3175 if reversehunks:
3176 3176 chunks = patch.reversehunks(chunks)
3177 3177
3178 3178 except patch.PatchError as err:
3179 3179 raise error.Abort(_('error parsing patch: %s') % err)
3180 3180
3181 3181 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3182 3182 # Apply changes
3183 3183 fp = cStringIO.StringIO()
3184 3184 for c in chunks:
3185 3185 c.write(fp)
3186 3186 dopatch = fp.tell()
3187 3187 fp.seek(0)
3188 3188 if dopatch:
3189 3189 try:
3190 3190 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3191 3191 except patch.PatchError as err:
3192 3192 raise error.Abort(str(err))
3193 3193 del fp
3194 3194 else:
3195 3195 for f in actions['revert'][0]:
3196 3196 checkout(f)
3197 3197 if normal:
3198 3198 normal(f)
3199 3199
3200 3200 for f in actions['add'][0]:
3201 3201 # Don't check out modified files; they are already created by the diff
3202 3202 if f not in newlyaddedandmodifiedfiles:
3203 3203 checkout(f)
3204 3204 repo.dirstate.add(f)
3205 3205
3206 3206 normal = repo.dirstate.normallookup
3207 3207 if node == parent and p2 == nullid:
3208 3208 normal = repo.dirstate.normal
3209 3209 for f in actions['undelete'][0]:
3210 3210 checkout(f)
3211 3211 normal(f)
3212 3212
3213 3213 copied = copies.pathcopies(repo[parent], ctx)
3214 3214
3215 3215 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3216 3216 if f in copied:
3217 3217 repo.dirstate.copy(copied[f], f)
3218 3218
3219 3219 def command(table):
3220 3220 """Returns a function object to be used as a decorator for making commands.
3221 3221
3222 3222 This function receives a command table as its argument. The table should
3223 3223 be a dict.
3224 3224
3225 3225 The returned function can be used as a decorator for adding commands
3226 3226 to that command table. This function accepts multiple arguments to define
3227 3227 a command.
3228 3228
3229 3229 The first argument is the command name.
3230 3230
3231 3231 The options argument is an iterable of tuples defining command arguments.
3232 3232 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3233 3233
3234 3234 The synopsis argument defines a short, one-line summary of how to use the
3235 3235 command. This shows up in the help output.
3236 3236
3237 3237 The norepo argument defines whether the command does not require a
3238 3238 local repository. Most commands operate against a repository, thus the
3239 3239 default is False.
3240 3240
3241 3241 The optionalrepo argument defines whether the command optionally requires
3242 3242 a local repository.
3243 3243
3244 3244 The inferrepo argument defines whether to try to find a repository from the
3245 3245 command line arguments. If True, arguments will be examined for potential
3246 3246 repository locations. See ``findrepo()``. If a repository is found, it
3247 3247 will be used.
3248 3248 """
3249 3249 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3250 3250 inferrepo=False):
3251 3251 def decorator(func):
3252 3252 if synopsis:
3253 3253 table[name] = func, list(options), synopsis
3254 3254 else:
3255 3255 table[name] = func, list(options)
3256 3256
3257 3257 if norepo:
3258 3258 # Avoid import cycle.
3259 3259 import commands
3260 3260 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3261 3261
3262 3262 if optionalrepo:
3263 3263 import commands
3264 3264 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3265 3265
3266 3266 if inferrepo:
3267 3267 import commands
3268 3268 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3269 3269
3270 3270 return func
3271 3271 return decorator
3272 3272
3273 3273 return cmd
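# A minimal usage sketch for an extension (the 'hello' command, its option
# and the surrounding module are hypothetical, not defined here):
#
#     from mercurial import cmdutil
#     from mercurial.i18n import _
#
#     cmdtable = {}
#     command = cmdutil.command(cmdtable)
#
#     @command('hello', [('g', 'greeting', 'hello', _('greeting to print'))],
#              _('hg hello [-g TEXT]'), norepo=True)
#     def hello(ui, **opts):
#         ui.write('%s\n' % opts['greeting'])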
3274 3274
3275 3275 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3276 3276 # commands.outgoing. "missing" is "missing" of the result of
3277 3277 # "findcommonoutgoing()"
3278 3278 outgoinghooks = util.hooks()
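# Sketch of how an extension might register one (the extension name and
# message are illustrative only):
#
#     def showoutgoing(ui, repo, other, opts, missing):
#         ui.status('myext: %d outgoing changesets\n' % len(missing))
#
#     cmdutil.outgoinghooks.add('myext', showoutgoing)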
3279 3279
3280 3280 # a list of (ui, repo) functions called by commands.summary
3281 3281 summaryhooks = util.hooks()
3282 3282
3283 3283 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3284 3284 #
3285 3285 # functions should return tuple of booleans below, if 'changes' is None:
3286 3286 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3287 3287 #
3288 3288 # otherwise, 'changes' is a tuple of tuples below:
3289 3289 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3290 3290 # - (desturl, destbranch, destpeer, outgoing)
3291 3291 summaryremotehooks = util.hooks()
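# Sketch of a conforming hook registered by a hypothetical extension:
#
#     def remotesummary(ui, repo, opts, changes):
#         if changes is None:
#             # first pass: request incoming information only
#             return (True, False)
#         sourceurl, sourcebranch, sourcepeer, incoming = changes[0]
#         if incoming:
#             ui.status('myext: incoming changes from %s\n' % sourceurl)
#
#     cmdutil.summaryremotehooks.add('myext', remotesummary)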
3292 3292
3293 3293 # A list of state files kept by multistep operations like graft.
3294 3294 # Since graft cannot be aborted, it is considered 'clearable' by update.
3295 3295 # note: bisect is intentionally excluded
3296 3296 # (state file, clearable, allowcommit, error, hint)
3297 3297 unfinishedstates = [
3298 3298 ('graftstate', True, False, _('graft in progress'),
3299 3299 _("use 'hg graft --continue' or 'hg update' to abort")),
3300 3300 ('updatestate', True, False, _('last update was interrupted'),
3301 3301 _("use 'hg update' to get a consistent checkout"))
3302 3302 ]
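# An extension with its own multistep state can append to this list; a
# rebase-like extension might register roughly (sketch, names illustrative):
#
#     cmdutil.unfinishedstates.append(
#         ('rebasestate', False, False, _('rebase in progress'),
#          _("use 'hg rebase --continue' or 'hg rebase --abort'")))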
3303 3303
3304 3304 def checkunfinished(repo, commit=False):
3305 3305 '''Look for an unfinished multistep operation, like graft, and abort
3306 3306 if found. It's probably good to check this right before
3307 3307 bailifchanged().
3308 3308 '''
3309 3309 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3310 3310 if commit and allowcommit:
3311 3311 continue
3312 3312 if repo.vfs.exists(f):
3313 3313 raise error.Abort(msg, hint=hint)
3314 3314
3315 3315 def clearunfinished(repo):
3316 3316 '''Check for unfinished operations (as above), and clear the ones
3317 3317 that are clearable.
3318 3318 '''
3319 3319 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3320 3320 if not clearable and repo.vfs.exists(f):
3321 3321 raise error.Abort(msg, hint=hint)
3322 3322 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3323 3323 if clearable and repo.vfs.exists(f):
3324 3324 util.unlink(repo.join(f))
3325 3325
3326 3326 class dirstateguard(object):
3327 3327 '''Restore dirstate at unexpected failure.
3328 3328
3329 3329 At construction, this class does:
3330 3330
3331 3331 - write current ``repo.dirstate`` out, and
3332 3332 - save ``.hg/dirstate`` into the backup file
3333 3333
3334 3334 This restores ``.hg/dirstate`` from backup file, if ``release()``
3335 3335 is invoked before ``close()``.
3336 3336
3337 3337 If ``close()`` is invoked before ``release()``, this just removes the backup file.
3338 3338 '''
3339 3339
3340 3340 def __init__(self, repo, name):
3341 3341 self._repo = repo
3342 3342 self._suffix = '.backup.%s.%d' % (name, id(self))
3343 3343 repo.dirstate._savebackup(repo.currenttransaction(), self._suffix)
3344 3344 self._active = True
3345 3345 self._closed = False
3346 3346
3347 3347 def __del__(self):
3348 3348 if self._active: # still active
3349 3349 # this may occur, even if this class is used correctly:
3350 3350 # for example, releasing other resources like a transaction
3351 3351 # may raise an exception before ``dirstateguard.release`` in
3352 3352 # ``release(tr, ....)``.
3353 3353 self._abort()
3354 3354
3355 3355 def close(self):
3356 3356 if not self._active: # already inactivated
3357 3357 msg = (_("can't close already inactivated backup: dirstate%s")
3358 3358 % self._suffix)
3359 3359 raise error.Abort(msg)
3360 3360
3361 3361 self._repo.dirstate._clearbackup(self._repo.currenttransaction(),
3362 3362 self._suffix)
3363 3363 self._active = False
3364 3364 self._closed = True
3365 3365
3366 3366 def _abort(self):
3367 3367 self._repo.dirstate._restorebackup(self._repo.currenttransaction(),
3368 3368 self._suffix)
3369 3369 self._active = False
3370 3370
3371 3371 def release(self):
3372 3372 if not self._closed:
3373 3373 if not self._active: # already inactivated
3374 3374 msg = (_("can't release already inactivated backup:"
3375 3375 " dirstate%s")
3376 3376 % self._suffix)
3377 3377 raise error.Abort(msg)
3378 3378 self._abort()
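# Typical usage sketch for a caller ('mycommand' is a placeholder name and
# the surrounding transaction handling is omitted):
#
#     dsguard = dirstateguard(repo, 'mycommand')
#     try:
#         ...                  # steps that may leave the dirstate half-updated
#         dsguard.close()      # success: keep the new dirstate
#     finally:
#         lockmod.release(dsguard)  # if close() was not reached, restore backup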