serve: allow --daemon-postexec to be 'unlink:path' or 'none'...
Jun Wu
r28195:213c8cf0 default
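The hunks below teach service() to parse the new forms: 'unlink:<path>' removes the given lock file once the daemon has detached, 'none' performs no post-exec action, and any other value aborts. A minimal sketch of the resulting invocations (hedged: --daemon-postexec is normally appended by the parent hg process for its detached child rather than typed by hand, and the lock path shown here is only illustrative):

  hg serve -d --daemon-postexec=unlink:/tmp/hg-service-XXXXXX  # unlink the startup lock file once the child is up
  hg serve -d --daemon-postexec=none                           # skip the post-exec step entirely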
@@ -1,3453 +1,3457
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import formatter
18 18 import crecord as crecordmod
19 19 import lock as lockmod
20 20
21 21 def ishunk(x):
22 22 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
23 23 return isinstance(x, hunkclasses)
24 24
25 25 def newandmodified(chunks, originalchunks):
26 26 newlyaddedandmodifiedfiles = set()
27 27 for chunk in chunks:
28 28 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
29 29 originalchunks:
30 30 newlyaddedandmodifiedfiles.add(chunk.header.filename())
31 31 return newlyaddedandmodifiedfiles
32 32
33 33 def parsealiases(cmd):
34 34 return cmd.lstrip("^").split("|")
35 35
36 36 def setupwrapcolorwrite(ui):
37 37 # wrap ui.write so diff output can be labeled/colorized
38 38 def wrapwrite(orig, *args, **kw):
39 39 label = kw.pop('label', '')
40 40 for chunk, l in patch.difflabel(lambda: args):
41 41 orig(chunk, label=label + l)
42 42
43 43 oldwrite = ui.write
44 44 def wrap(*args, **kwargs):
45 45 return wrapwrite(oldwrite, *args, **kwargs)
46 46 setattr(ui, 'write', wrap)
47 47 return oldwrite
48 48
49 49 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
50 50 if usecurses:
51 51 if testfile:
52 52 recordfn = crecordmod.testdecorator(testfile,
53 53 crecordmod.testchunkselector)
54 54 else:
55 55 recordfn = crecordmod.chunkselector
56 56
57 57 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
58 58
59 59 else:
60 60 return patch.filterpatch(ui, originalhunks, operation)
61 61
62 62 def recordfilter(ui, originalhunks, operation=None):
63 63 """ Prompts the user to filter the originalhunks and return a list of
64 64 selected hunks.
65 65 *operation* is used for ui purposes to indicate to the user
66 66 what kind of filtering they are doing: reverting, committing, shelving, etc.
67 67 *operation* has to be a translated string.
68 68 """
69 69 usecurses = crecordmod.checkcurses(ui)
70 70 testfile = ui.config('experimental', 'crecordtest', None)
71 71 oldwrite = setupwrapcolorwrite(ui)
72 72 try:
73 73 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
74 74 testfile, operation)
75 75 finally:
76 76 ui.write = oldwrite
77 77 return newchunks, newopts
78 78
79 79 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
80 80 filterfn, *pats, **opts):
81 81 import merge as mergemod
82 82
83 83 if not ui.interactive():
84 84 if cmdsuggest:
85 85 msg = _('running non-interactively, use %s instead') % cmdsuggest
86 86 else:
87 87 msg = _('running non-interactively')
88 88 raise error.Abort(msg)
89 89
90 90 # make sure username is set before going interactive
91 91 if not opts.get('user'):
92 92 ui.username() # raise exception, username not provided
93 93
94 94 def recordfunc(ui, repo, message, match, opts):
95 95 """This is the generic record driver.
96 96
97 97 Its job is to interactively filter local changes, and
98 98 accordingly prepare working directory into a state in which the
99 99 job can be delegated to a non-interactive commit command such as
100 100 'commit' or 'qrefresh'.
101 101
102 102 After the actual job is done by non-interactive command, the
103 103 working directory is restored to its original state.
104 104
105 105 In the end we'll record interesting changes, and everything else
106 106 will be left in place, so the user can continue working.
107 107 """
108 108
109 109 checkunfinished(repo, commit=True)
110 110 merge = len(repo[None].parents()) > 1
111 111 if merge:
112 112 raise error.Abort(_('cannot partially commit a merge '
113 113 '(use "hg commit" instead)'))
114 114
115 115 status = repo.status(match=match)
116 116 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
117 117 diffopts.nodates = True
118 118 diffopts.git = True
119 119 diffopts.showfunc = True
120 120 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
121 121 originalchunks = patch.parsepatch(originaldiff)
122 122
123 123 # 1. filter patch, so we have an intending-to-apply subset of it
124 124 try:
125 125 chunks, newopts = filterfn(ui, originalchunks)
126 126 except patch.PatchError as err:
127 127 raise error.Abort(_('error parsing patch: %s') % err)
128 128 opts.update(newopts)
129 129
130 130 # We need to keep a backup of files that have been newly added and
131 131 # modified during the recording process because there is a previous
132 132 # version without the edit in the workdir
133 133 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
134 134 contenders = set()
135 135 for h in chunks:
136 136 try:
137 137 contenders.update(set(h.files()))
138 138 except AttributeError:
139 139 pass
140 140
141 141 changed = status.modified + status.added + status.removed
142 142 newfiles = [f for f in changed if f in contenders]
143 143 if not newfiles:
144 144 ui.status(_('no changes to record\n'))
145 145 return 0
146 146
147 147 modified = set(status.modified)
148 148
149 149 # 2. backup changed files, so we can restore them in the end
150 150
151 151 if backupall:
152 152 tobackup = changed
153 153 else:
154 154 tobackup = [f for f in newfiles if f in modified or f in \
155 155 newlyaddedandmodifiedfiles]
156 156 backups = {}
157 157 if tobackup:
158 158 backupdir = repo.join('record-backups')
159 159 try:
160 160 os.mkdir(backupdir)
161 161 except OSError as err:
162 162 if err.errno != errno.EEXIST:
163 163 raise
164 164 try:
165 165 # backup continues
166 166 for f in tobackup:
167 167 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
168 168 dir=backupdir)
169 169 os.close(fd)
170 170 ui.debug('backup %r as %r\n' % (f, tmpname))
171 171 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
172 172 backups[f] = tmpname
173 173
174 174 fp = cStringIO.StringIO()
175 175 for c in chunks:
176 176 fname = c.filename()
177 177 if fname in backups:
178 178 c.write(fp)
179 179 dopatch = fp.tell()
180 180 fp.seek(0)
181 181
182 182 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
183 183 # 3a. apply filtered patch to clean repo (clean)
184 184 if backups:
185 185 # Equivalent to hg.revert
186 186 m = scmutil.matchfiles(repo, backups.keys())
187 187 mergemod.update(repo, repo.dirstate.p1(),
188 188 False, True, matcher=m)
189 189
190 190 # 3b. (apply)
191 191 if dopatch:
192 192 try:
193 193 ui.debug('applying patch\n')
194 194 ui.debug(fp.getvalue())
195 195 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
196 196 except patch.PatchError as err:
197 197 raise error.Abort(str(err))
198 198 del fp
199 199
200 200 # 4. We prepared working directory according to filtered
201 201 # patch. Now is the time to delegate the job to
202 202 # commit/qrefresh or the like!
203 203
204 204 # Make all of the pathnames absolute.
205 205 newfiles = [repo.wjoin(nf) for nf in newfiles]
206 206 return commitfunc(ui, repo, *newfiles, **opts)
207 207 finally:
208 208 # 5. finally restore backed-up files
209 209 try:
210 210 dirstate = repo.dirstate
211 211 for realname, tmpname in backups.iteritems():
212 212 ui.debug('restoring %r to %r\n' % (tmpname, realname))
213 213
214 214 if dirstate[realname] == 'n':
215 215 # without normallookup, restoring timestamp
216 216 # may cause partially committed files
217 217 # to be treated as unmodified
218 218 dirstate.normallookup(realname)
219 219
220 220 # copystat=True here and above are a hack to trick any
221 221 # editors that have f open into thinking that we haven't modified them.
222 222 #
223 223 # Also note that this is racy as an editor could notice the
224 224 # file's mtime before we've finished writing it.
225 225 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
226 226 os.unlink(tmpname)
227 227 if tobackup:
228 228 os.rmdir(backupdir)
229 229 except OSError:
230 230 pass
231 231
232 232 def recordinwlock(ui, repo, message, match, opts):
233 233 with repo.wlock():
234 234 return recordfunc(ui, repo, message, match, opts)
235 235
236 236 return commit(ui, repo, recordinwlock, pats, opts)
237 237
238 238 def findpossible(cmd, table, strict=False):
239 239 """
240 240 Return cmd -> (aliases, command table entry)
241 241 for each matching command.
242 242 Return debug commands (or their aliases) only if no normal command matches.
243 243 """
244 244 choice = {}
245 245 debugchoice = {}
246 246
247 247 if cmd in table:
248 248 # short-circuit exact matches, "log" alias beats "^log|history"
249 249 keys = [cmd]
250 250 else:
251 251 keys = table.keys()
252 252
253 253 allcmds = []
254 254 for e in keys:
255 255 aliases = parsealiases(e)
256 256 allcmds.extend(aliases)
257 257 found = None
258 258 if cmd in aliases:
259 259 found = cmd
260 260 elif not strict:
261 261 for a in aliases:
262 262 if a.startswith(cmd):
263 263 found = a
264 264 break
265 265 if found is not None:
266 266 if aliases[0].startswith("debug") or found.startswith("debug"):
267 267 debugchoice[found] = (aliases, table[e])
268 268 else:
269 269 choice[found] = (aliases, table[e])
270 270
271 271 if not choice and debugchoice:
272 272 choice = debugchoice
273 273
274 274 return choice, allcmds
275 275
276 276 def findcmd(cmd, table, strict=True):
277 277 """Return (aliases, command table entry) for command string."""
278 278 choice, allcmds = findpossible(cmd, table, strict)
279 279
280 280 if cmd in choice:
281 281 return choice[cmd]
282 282
283 283 if len(choice) > 1:
284 284 clist = choice.keys()
285 285 clist.sort()
286 286 raise error.AmbiguousCommand(cmd, clist)
287 287
288 288 if choice:
289 289 return choice.values()[0]
290 290
291 291 raise error.UnknownCommand(cmd, allcmds)
292 292
293 293 def findrepo(p):
294 294 while not os.path.isdir(os.path.join(p, ".hg")):
295 295 oldp, p = p, os.path.dirname(p)
296 296 if p == oldp:
297 297 return None
298 298
299 299 return p
300 300
301 301 def bailifchanged(repo, merge=True):
302 302 if merge and repo.dirstate.p2() != nullid:
303 303 raise error.Abort(_('outstanding uncommitted merge'))
304 304 modified, added, removed, deleted = repo.status()[:4]
305 305 if modified or added or removed or deleted:
306 306 raise error.Abort(_('uncommitted changes'))
307 307 ctx = repo[None]
308 308 for s in sorted(ctx.substate):
309 309 ctx.sub(s).bailifchanged()
310 310
311 311 def logmessage(ui, opts):
312 312 """ get the log message according to -m and -l option """
313 313 message = opts.get('message')
314 314 logfile = opts.get('logfile')
315 315
316 316 if message and logfile:
317 317 raise error.Abort(_('options --message and --logfile are mutually '
318 318 'exclusive'))
319 319 if not message and logfile:
320 320 try:
321 321 if logfile == '-':
322 322 message = ui.fin.read()
323 323 else:
324 324 message = '\n'.join(util.readfile(logfile).splitlines())
325 325 except IOError as inst:
326 326 raise error.Abort(_("can't read commit message '%s': %s") %
327 327 (logfile, inst.strerror))
328 328 return message
329 329
330 330 def mergeeditform(ctxorbool, baseformname):
331 331 """return appropriate editform name (referencing a committemplate)
332 332
333 333 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
334 334 a merge is being committed.
335 335
336 336 This returns baseformname with '.merge' appended if it is a merge,
337 337 otherwise '.normal' is appended.
338 338 """
339 339 if isinstance(ctxorbool, bool):
340 340 if ctxorbool:
341 341 return baseformname + ".merge"
342 342 elif 1 < len(ctxorbool.parents()):
343 343 return baseformname + ".merge"
344 344
345 345 return baseformname + ".normal"
346 346
347 347 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
348 348 editform='', **opts):
349 349 """get appropriate commit message editor according to '--edit' option
350 350
351 351 'finishdesc' is a function to be called with edited commit message
352 352 (= 'description' of the new changeset) just after editing, but
353 353 before checking empty-ness. It should return actual text to be
354 354 stored into history. This allows the description to be changed before
355 355 storing.
356 356
357 357 'extramsg' is an extra message to be shown in the editor instead of
358 358 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
359 359 are automatically added.
360 360
361 361 'editform' is a dot-separated list of names, to distinguish
362 362 the purpose of commit text editing.
363 363
364 364 'getcommiteditor' returns 'commitforceeditor' regardless of
365 365 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
366 366 they are specific for usage in MQ.
367 367 """
368 368 if edit or finishdesc or extramsg:
369 369 return lambda r, c, s: commitforceeditor(r, c, s,
370 370 finishdesc=finishdesc,
371 371 extramsg=extramsg,
372 372 editform=editform)
373 373 elif editform:
374 374 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
375 375 else:
376 376 return commiteditor
377 377
378 378 def loglimit(opts):
379 379 """get the log limit according to option -l/--limit"""
380 380 limit = opts.get('limit')
381 381 if limit:
382 382 try:
383 383 limit = int(limit)
384 384 except ValueError:
385 385 raise error.Abort(_('limit must be a positive integer'))
386 386 if limit <= 0:
387 387 raise error.Abort(_('limit must be positive'))
388 388 else:
389 389 limit = None
390 390 return limit
391 391
392 392 def makefilename(repo, pat, node, desc=None,
393 393 total=None, seqno=None, revwidth=None, pathname=None):
394 394 node_expander = {
395 395 'H': lambda: hex(node),
396 396 'R': lambda: str(repo.changelog.rev(node)),
397 397 'h': lambda: short(node),
398 398 'm': lambda: re.sub('[^\w]', '_', str(desc))
399 399 }
400 400 expander = {
401 401 '%': lambda: '%',
402 402 'b': lambda: os.path.basename(repo.root),
403 403 }
404 404
405 405 try:
406 406 if node:
407 407 expander.update(node_expander)
408 408 if node:
409 409 expander['r'] = (lambda:
410 410 str(repo.changelog.rev(node)).zfill(revwidth or 0))
411 411 if total is not None:
412 412 expander['N'] = lambda: str(total)
413 413 if seqno is not None:
414 414 expander['n'] = lambda: str(seqno)
415 415 if total is not None and seqno is not None:
416 416 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
417 417 if pathname is not None:
418 418 expander['s'] = lambda: os.path.basename(pathname)
419 419 expander['d'] = lambda: os.path.dirname(pathname) or '.'
420 420 expander['p'] = lambda: pathname
421 421
422 422 newname = []
423 423 patlen = len(pat)
424 424 i = 0
425 425 while i < patlen:
426 426 c = pat[i]
427 427 if c == '%':
428 428 i += 1
429 429 c = pat[i]
430 430 c = expander[c]()
431 431 newname.append(c)
432 432 i += 1
433 433 return ''.join(newname)
434 434 except KeyError as inst:
435 435 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
436 436 inst.args[0])
437 437
438 438 class _unclosablefile(object):
439 439 def __init__(self, fp):
440 440 self._fp = fp
441 441
442 442 def close(self):
443 443 pass
444 444
445 445 def __iter__(self):
446 446 return iter(self._fp)
447 447
448 448 def __getattr__(self, attr):
449 449 return getattr(self._fp, attr)
450 450
451 451 def makefileobj(repo, pat, node=None, desc=None, total=None,
452 452 seqno=None, revwidth=None, mode='wb', modemap=None,
453 453 pathname=None):
454 454
455 455 writable = mode not in ('r', 'rb')
456 456
457 457 if not pat or pat == '-':
458 458 if writable:
459 459 fp = repo.ui.fout
460 460 else:
461 461 fp = repo.ui.fin
462 462 return _unclosablefile(fp)
463 463 if util.safehasattr(pat, 'write') and writable:
464 464 return pat
465 465 if util.safehasattr(pat, 'read') and 'r' in mode:
466 466 return pat
467 467 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
468 468 if modemap is not None:
469 469 mode = modemap.get(fn, mode)
470 470 if mode == 'wb':
471 471 modemap[fn] = 'ab'
472 472 return open(fn, mode)
473 473
474 474 def openrevlog(repo, cmd, file_, opts):
475 475 """opens the changelog, manifest, a filelog or a given revlog"""
476 476 cl = opts['changelog']
477 477 mf = opts['manifest']
478 478 dir = opts['dir']
479 479 msg = None
480 480 if cl and mf:
481 481 msg = _('cannot specify --changelog and --manifest at the same time')
482 482 elif cl and dir:
483 483 msg = _('cannot specify --changelog and --dir at the same time')
484 484 elif cl or mf:
485 485 if file_:
486 486 msg = _('cannot specify filename with --changelog or --manifest')
487 487 elif not repo:
488 488 msg = _('cannot specify --changelog or --manifest or --dir '
489 489 'without a repository')
490 490 if msg:
491 491 raise error.Abort(msg)
492 492
493 493 r = None
494 494 if repo:
495 495 if cl:
496 496 r = repo.unfiltered().changelog
497 497 elif dir:
498 498 if 'treemanifest' not in repo.requirements:
499 499 raise error.Abort(_("--dir can only be used on repos with "
500 500 "treemanifest enabled"))
501 501 dirlog = repo.dirlog(file_)
502 502 if len(dirlog):
503 503 r = dirlog
504 504 elif mf:
505 505 r = repo.manifest
506 506 elif file_:
507 507 filelog = repo.file(file_)
508 508 if len(filelog):
509 509 r = filelog
510 510 if not r:
511 511 if not file_:
512 512 raise error.CommandError(cmd, _('invalid arguments'))
513 513 if not os.path.isfile(file_):
514 514 raise error.Abort(_("revlog '%s' not found") % file_)
515 515 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
516 516 file_[:-2] + ".i")
517 517 return r
518 518
519 519 def copy(ui, repo, pats, opts, rename=False):
520 520 # called with the repo lock held
521 521 #
522 522 # hgsep => pathname that uses "/" to separate directories
523 523 # ossep => pathname that uses os.sep to separate directories
524 524 cwd = repo.getcwd()
525 525 targets = {}
526 526 after = opts.get("after")
527 527 dryrun = opts.get("dry_run")
528 528 wctx = repo[None]
529 529
530 530 def walkpat(pat):
531 531 srcs = []
532 532 if after:
533 533 badstates = '?'
534 534 else:
535 535 badstates = '?r'
536 536 m = scmutil.match(repo[None], [pat], opts, globbed=True)
537 537 for abs in repo.walk(m):
538 538 state = repo.dirstate[abs]
539 539 rel = m.rel(abs)
540 540 exact = m.exact(abs)
541 541 if state in badstates:
542 542 if exact and state == '?':
543 543 ui.warn(_('%s: not copying - file is not managed\n') % rel)
544 544 if exact and state == 'r':
545 545 ui.warn(_('%s: not copying - file has been marked for'
546 546 ' remove\n') % rel)
547 547 continue
548 548 # abs: hgsep
549 549 # rel: ossep
550 550 srcs.append((abs, rel, exact))
551 551 return srcs
552 552
553 553 # abssrc: hgsep
554 554 # relsrc: ossep
555 555 # otarget: ossep
556 556 def copyfile(abssrc, relsrc, otarget, exact):
557 557 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
558 558 if '/' in abstarget:
559 559 # We cannot normalize abstarget itself, this would prevent
560 560 # case only renames, like a => A.
561 561 abspath, absname = abstarget.rsplit('/', 1)
562 562 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
563 563 reltarget = repo.pathto(abstarget, cwd)
564 564 target = repo.wjoin(abstarget)
565 565 src = repo.wjoin(abssrc)
566 566 state = repo.dirstate[abstarget]
567 567
568 568 scmutil.checkportable(ui, abstarget)
569 569
570 570 # check for collisions
571 571 prevsrc = targets.get(abstarget)
572 572 if prevsrc is not None:
573 573 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
574 574 (reltarget, repo.pathto(abssrc, cwd),
575 575 repo.pathto(prevsrc, cwd)))
576 576 return
577 577
578 578 # check for overwrites
579 579 exists = os.path.lexists(target)
580 580 samefile = False
581 581 if exists and abssrc != abstarget:
582 582 if (repo.dirstate.normalize(abssrc) ==
583 583 repo.dirstate.normalize(abstarget)):
584 584 if not rename:
585 585 ui.warn(_("%s: can't copy - same file\n") % reltarget)
586 586 return
587 587 exists = False
588 588 samefile = True
589 589
590 590 if not after and exists or after and state in 'mn':
591 591 if not opts['force']:
592 592 ui.warn(_('%s: not overwriting - file exists\n') %
593 593 reltarget)
594 594 return
595 595
596 596 if after:
597 597 if not exists:
598 598 if rename:
599 599 ui.warn(_('%s: not recording move - %s does not exist\n') %
600 600 (relsrc, reltarget))
601 601 else:
602 602 ui.warn(_('%s: not recording copy - %s does not exist\n') %
603 603 (relsrc, reltarget))
604 604 return
605 605 elif not dryrun:
606 606 try:
607 607 if exists:
608 608 os.unlink(target)
609 609 targetdir = os.path.dirname(target) or '.'
610 610 if not os.path.isdir(targetdir):
611 611 os.makedirs(targetdir)
612 612 if samefile:
613 613 tmp = target + "~hgrename"
614 614 os.rename(src, tmp)
615 615 os.rename(tmp, target)
616 616 else:
617 617 util.copyfile(src, target)
618 618 srcexists = True
619 619 except IOError as inst:
620 620 if inst.errno == errno.ENOENT:
621 621 ui.warn(_('%s: deleted in working directory\n') % relsrc)
622 622 srcexists = False
623 623 else:
624 624 ui.warn(_('%s: cannot copy - %s\n') %
625 625 (relsrc, inst.strerror))
626 626 return True # report a failure
627 627
628 628 if ui.verbose or not exact:
629 629 if rename:
630 630 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
631 631 else:
632 632 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
633 633
634 634 targets[abstarget] = abssrc
635 635
636 636 # fix up dirstate
637 637 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
638 638 dryrun=dryrun, cwd=cwd)
639 639 if rename and not dryrun:
640 640 if not after and srcexists and not samefile:
641 641 util.unlinkpath(repo.wjoin(abssrc))
642 642 wctx.forget([abssrc])
643 643
644 644 # pat: ossep
645 645 # dest ossep
646 646 # srcs: list of (hgsep, hgsep, ossep, bool)
647 647 # return: function that takes hgsep and returns ossep
648 648 def targetpathfn(pat, dest, srcs):
649 649 if os.path.isdir(pat):
650 650 abspfx = pathutil.canonpath(repo.root, cwd, pat)
651 651 abspfx = util.localpath(abspfx)
652 652 if destdirexists:
653 653 striplen = len(os.path.split(abspfx)[0])
654 654 else:
655 655 striplen = len(abspfx)
656 656 if striplen:
657 657 striplen += len(os.sep)
658 658 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
659 659 elif destdirexists:
660 660 res = lambda p: os.path.join(dest,
661 661 os.path.basename(util.localpath(p)))
662 662 else:
663 663 res = lambda p: dest
664 664 return res
665 665
666 666 # pat: ossep
667 667 # dest ossep
668 668 # srcs: list of (hgsep, hgsep, ossep, bool)
669 669 # return: function that takes hgsep and returns ossep
670 670 def targetpathafterfn(pat, dest, srcs):
671 671 if matchmod.patkind(pat):
672 672 # a mercurial pattern
673 673 res = lambda p: os.path.join(dest,
674 674 os.path.basename(util.localpath(p)))
675 675 else:
676 676 abspfx = pathutil.canonpath(repo.root, cwd, pat)
677 677 if len(abspfx) < len(srcs[0][0]):
678 678 # A directory. Either the target path contains the last
679 679 # component of the source path or it does not.
680 680 def evalpath(striplen):
681 681 score = 0
682 682 for s in srcs:
683 683 t = os.path.join(dest, util.localpath(s[0])[striplen:])
684 684 if os.path.lexists(t):
685 685 score += 1
686 686 return score
687 687
688 688 abspfx = util.localpath(abspfx)
689 689 striplen = len(abspfx)
690 690 if striplen:
691 691 striplen += len(os.sep)
692 692 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
693 693 score = evalpath(striplen)
694 694 striplen1 = len(os.path.split(abspfx)[0])
695 695 if striplen1:
696 696 striplen1 += len(os.sep)
697 697 if evalpath(striplen1) > score:
698 698 striplen = striplen1
699 699 res = lambda p: os.path.join(dest,
700 700 util.localpath(p)[striplen:])
701 701 else:
702 702 # a file
703 703 if destdirexists:
704 704 res = lambda p: os.path.join(dest,
705 705 os.path.basename(util.localpath(p)))
706 706 else:
707 707 res = lambda p: dest
708 708 return res
709 709
710 710 pats = scmutil.expandpats(pats)
711 711 if not pats:
712 712 raise error.Abort(_('no source or destination specified'))
713 713 if len(pats) == 1:
714 714 raise error.Abort(_('no destination specified'))
715 715 dest = pats.pop()
716 716 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
717 717 if not destdirexists:
718 718 if len(pats) > 1 or matchmod.patkind(pats[0]):
719 719 raise error.Abort(_('with multiple sources, destination must be an '
720 720 'existing directory'))
721 721 if util.endswithsep(dest):
722 722 raise error.Abort(_('destination %s is not a directory') % dest)
723 723
724 724 tfn = targetpathfn
725 725 if after:
726 726 tfn = targetpathafterfn
727 727 copylist = []
728 728 for pat in pats:
729 729 srcs = walkpat(pat)
730 730 if not srcs:
731 731 continue
732 732 copylist.append((tfn(pat, dest, srcs), srcs))
733 733 if not copylist:
734 734 raise error.Abort(_('no files to copy'))
735 735
736 736 errors = 0
737 737 for targetpath, srcs in copylist:
738 738 for abssrc, relsrc, exact in srcs:
739 739 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
740 740 errors += 1
741 741
742 742 if errors:
743 743 ui.warn(_('(consider using --after)\n'))
744 744
745 745 return errors != 0
746 746
747 747 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
748 748 runargs=None, appendpid=False):
749 749 '''Run a command as a service.'''
750 750
751 751 def writepid(pid):
752 752 if opts['pid_file']:
753 753 if appendpid:
754 754 mode = 'a'
755 755 else:
756 756 mode = 'w'
757 757 fp = open(opts['pid_file'], mode)
758 758 fp.write(str(pid) + '\n')
759 759 fp.close()
760 760
761 761 if opts['daemon'] and not opts['daemon_postexec']:
762 762 # Signal child process startup with file removal
763 763 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
764 764 os.close(lockfd)
765 765 try:
766 766 if not runargs:
767 767 runargs = util.hgcmd() + sys.argv[1:]
768 runargs.append('--daemon-postexec=%s' % lockpath)
768 runargs.append('--daemon-postexec=unlink:%s' % lockpath)
769 769 # Don't pass --cwd to the child process, because we've already
770 770 # changed directory.
771 771 for i in xrange(1, len(runargs)):
772 772 if runargs[i].startswith('--cwd='):
773 773 del runargs[i]
774 774 break
775 775 elif runargs[i].startswith('--cwd'):
776 776 del runargs[i:i + 2]
777 777 break
778 778 def condfn():
779 779 return not os.path.exists(lockpath)
780 780 pid = util.rundetached(runargs, condfn)
781 781 if pid < 0:
782 782 raise error.Abort(_('child process failed to start'))
783 783 writepid(pid)
784 784 finally:
785 785 try:
786 786 os.unlink(lockpath)
787 787 except OSError as e:
788 788 if e.errno != errno.ENOENT:
789 789 raise
790 790 if parentfn:
791 791 return parentfn(pid)
792 792 else:
793 793 return
794 794
795 795 if initfn:
796 796 initfn()
797 797
798 798 if not opts['daemon']:
799 799 writepid(util.getpid())
800 800
801 801 if opts['daemon_postexec']:
802 lockpath = opts['daemon_postexec']
802 inst = opts['daemon_postexec']
803 803 try:
804 804 os.setsid()
805 805 except AttributeError:
806 806 pass
807 if inst.startswith('unlink:'):
808 lockpath = inst[7:]
807 809 os.unlink(lockpath)
810 elif inst != 'none':
811 raise error.Abort(_('invalid value for --daemon-postexec'))
808 812 util.hidewindow()
809 813 sys.stdout.flush()
810 814 sys.stderr.flush()
811 815
812 816 nullfd = os.open(os.devnull, os.O_RDWR)
813 817 logfilefd = nullfd
814 818 if logfile:
815 819 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
816 820 os.dup2(nullfd, 0)
817 821 os.dup2(logfilefd, 1)
818 822 os.dup2(logfilefd, 2)
819 823 if nullfd not in (0, 1, 2):
820 824 os.close(nullfd)
821 825 if logfile and logfilefd not in (0, 1, 2):
822 826 os.close(logfilefd)
823 827
824 828 if runfn:
825 829 return runfn()
826 830
827 831 ## facility to let extension process additional data into an import patch
828 832 # list of identifiers to be executed in order
829 833 extrapreimport = [] # run before commit
830 834 extrapostimport = [] # run after commit
831 835 # mapping from identifier to actual import function
832 836 #
833 837 # 'preimport' are run before the commit is made and are provided the following
834 838 # arguments:
835 839 # - repo: the localrepository instance,
836 840 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
837 841 # - extra: the future extra dictionary of the changeset, please mutate it,
838 842 # - opts: the import options.
839 843 # XXX ideally, we would just pass a ctx ready to be computed, that would allow
840 844 # mutation of in memory commit and more. Feel free to rework the code to get
841 845 # there.
842 846 extrapreimportmap = {}
843 847 # 'postimport' are run after the commit is made and are provided the following
844 848 # argument:
845 849 # - ctx: the changectx created by import.
846 850 extrapostimportmap = {}
847 851
848 852 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
849 853 """Utility function used by commands.import to import a single patch
850 854
851 855 This function is explicitly defined here to help the evolve extension to
852 856 wrap this part of the import logic.
853 857
854 858 The API is currently a bit ugly because it is a simple code translation from
855 859 the import command. Feel free to make it better.
856 860
857 861 :hunk: a patch (as a binary string)
858 862 :parents: nodes that will be parent of the created commit
859 863 :opts: the full dict of options passed to the import command
860 864 :msgs: list to save commit message to.
861 865 (used in case we need to save it when failing)
862 866 :updatefunc: a function that updates a repo to a given node
863 867 updatefunc(<repo>, <node>)
864 868 """
865 869 # avoid cycle context -> subrepo -> cmdutil
866 870 import context
867 871 extractdata = patch.extract(ui, hunk)
868 872 tmpname = extractdata.get('filename')
869 873 message = extractdata.get('message')
870 874 user = opts.get('user') or extractdata.get('user')
871 875 date = opts.get('date') or extractdata.get('date')
872 876 branch = extractdata.get('branch')
873 877 nodeid = extractdata.get('nodeid')
874 878 p1 = extractdata.get('p1')
875 879 p2 = extractdata.get('p2')
876 880
877 881 nocommit = opts.get('no_commit')
878 882 importbranch = opts.get('import_branch')
879 883 update = not opts.get('bypass')
880 884 strip = opts["strip"]
881 885 prefix = opts["prefix"]
882 886 sim = float(opts.get('similarity') or 0)
883 887 if not tmpname:
884 888 return (None, None, False)
885 889
886 890 rejects = False
887 891
888 892 try:
889 893 cmdline_message = logmessage(ui, opts)
890 894 if cmdline_message:
891 895 # pickup the cmdline msg
892 896 message = cmdline_message
893 897 elif message:
894 898 # pickup the patch msg
895 899 message = message.strip()
896 900 else:
897 901 # launch the editor
898 902 message = None
899 903 ui.debug('message:\n%s\n' % message)
900 904
901 905 if len(parents) == 1:
902 906 parents.append(repo[nullid])
903 907 if opts.get('exact'):
904 908 if not nodeid or not p1:
905 909 raise error.Abort(_('not a Mercurial patch'))
906 910 p1 = repo[p1]
907 911 p2 = repo[p2 or nullid]
908 912 elif p2:
909 913 try:
910 914 p1 = repo[p1]
911 915 p2 = repo[p2]
912 916 # Without any options, consider p2 only if the
913 917 # patch is being applied on top of the recorded
914 918 # first parent.
915 919 if p1 != parents[0]:
916 920 p1 = parents[0]
917 921 p2 = repo[nullid]
918 922 except error.RepoError:
919 923 p1, p2 = parents
920 924 if p2.node() == nullid:
921 925 ui.warn(_("warning: import the patch as a normal revision\n"
922 926 "(use --exact to import the patch as a merge)\n"))
923 927 else:
924 928 p1, p2 = parents
925 929
926 930 n = None
927 931 if update:
928 932 if p1 != parents[0]:
929 933 updatefunc(repo, p1.node())
930 934 if p2 != parents[1]:
931 935 repo.setparents(p1.node(), p2.node())
932 936
933 937 if opts.get('exact') or importbranch:
934 938 repo.dirstate.setbranch(branch or 'default')
935 939
936 940 partial = opts.get('partial', False)
937 941 files = set()
938 942 try:
939 943 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
940 944 files=files, eolmode=None, similarity=sim / 100.0)
941 945 except patch.PatchError as e:
942 946 if not partial:
943 947 raise error.Abort(str(e))
944 948 if partial:
945 949 rejects = True
946 950
947 951 files = list(files)
948 952 if nocommit:
949 953 if message:
950 954 msgs.append(message)
951 955 else:
952 956 if opts.get('exact') or p2:
953 957 # If you got here, you either use --force and know what
954 958 # you are doing or used --exact or a merge patch while
955 959 # being updated to its first parent.
956 960 m = None
957 961 else:
958 962 m = scmutil.matchfiles(repo, files or [])
959 963 editform = mergeeditform(repo[None], 'import.normal')
960 964 if opts.get('exact'):
961 965 editor = None
962 966 else:
963 967 editor = getcommiteditor(editform=editform, **opts)
964 968 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
965 969 extra = {}
966 970 for idfunc in extrapreimport:
967 971 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
968 972 try:
969 973 if partial:
970 974 repo.ui.setconfig('ui', 'allowemptycommit', True)
971 975 n = repo.commit(message, user,
972 976 date, match=m,
973 977 editor=editor, extra=extra)
974 978 for idfunc in extrapostimport:
975 979 extrapostimportmap[idfunc](repo[n])
976 980 finally:
977 981 repo.ui.restoreconfig(allowemptyback)
978 982 else:
979 983 if opts.get('exact') or importbranch:
980 984 branch = branch or 'default'
981 985 else:
982 986 branch = p1.branch()
983 987 store = patch.filestore()
984 988 try:
985 989 files = set()
986 990 try:
987 991 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
988 992 files, eolmode=None)
989 993 except patch.PatchError as e:
990 994 raise error.Abort(str(e))
991 995 if opts.get('exact'):
992 996 editor = None
993 997 else:
994 998 editor = getcommiteditor(editform='import.bypass')
995 999 memctx = context.makememctx(repo, (p1.node(), p2.node()),
996 1000 message,
997 1001 user,
998 1002 date,
999 1003 branch, files, store,
1000 1004 editor=editor)
1001 1005 n = memctx.commit()
1002 1006 finally:
1003 1007 store.close()
1004 1008 if opts.get('exact') and nocommit:
1005 1009 # --exact with --no-commit is still useful in that it does merge
1006 1010 # and branch bits
1007 1011 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1008 1012 elif opts.get('exact') and hex(n) != nodeid:
1009 1013 raise error.Abort(_('patch is damaged or loses information'))
1010 1014 msg = _('applied to working directory')
1011 1015 if n:
1012 1016 # i18n: refers to a short changeset id
1013 1017 msg = _('created %s') % short(n)
1014 1018 return (msg, n, rejects)
1015 1019 finally:
1016 1020 os.unlink(tmpname)
1017 1021
1018 1022 # facility to let extensions include additional data in an exported patch
1019 1023 # list of identifiers to be executed in order
1020 1024 extraexport = []
1021 1025 # mapping from identifier to actual export function
1022 1026 # function has to return a string to be added to the header or None
1023 1027 # it is given two arguments (sequencenumber, changectx)
1024 1028 extraexportmap = {}
1025 1029
1026 1030 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1027 1031 opts=None, match=None):
1028 1032 '''export changesets as hg patches.'''
1029 1033
1030 1034 total = len(revs)
1031 1035 revwidth = max([len(str(rev)) for rev in revs])
1032 1036 filemode = {}
1033 1037
1034 1038 def single(rev, seqno, fp):
1035 1039 ctx = repo[rev]
1036 1040 node = ctx.node()
1037 1041 parents = [p.node() for p in ctx.parents() if p]
1038 1042 branch = ctx.branch()
1039 1043 if switch_parent:
1040 1044 parents.reverse()
1041 1045
1042 1046 if parents:
1043 1047 prev = parents[0]
1044 1048 else:
1045 1049 prev = nullid
1046 1050
1047 1051 shouldclose = False
1048 1052 if not fp and len(template) > 0:
1049 1053 desc_lines = ctx.description().rstrip().split('\n')
1050 1054 desc = desc_lines[0] #Commit always has a first line.
1051 1055 fp = makefileobj(repo, template, node, desc=desc, total=total,
1052 1056 seqno=seqno, revwidth=revwidth, mode='wb',
1053 1057 modemap=filemode)
1054 1058 shouldclose = True
1055 1059 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1056 1060 repo.ui.note("%s\n" % fp.name)
1057 1061
1058 1062 if not fp:
1059 1063 write = repo.ui.write
1060 1064 else:
1061 1065 def write(s, **kw):
1062 1066 fp.write(s)
1063 1067
1064 1068 write("# HG changeset patch\n")
1065 1069 write("# User %s\n" % ctx.user())
1066 1070 write("# Date %d %d\n" % ctx.date())
1067 1071 write("# %s\n" % util.datestr(ctx.date()))
1068 1072 if branch and branch != 'default':
1069 1073 write("# Branch %s\n" % branch)
1070 1074 write("# Node ID %s\n" % hex(node))
1071 1075 write("# Parent %s\n" % hex(prev))
1072 1076 if len(parents) > 1:
1073 1077 write("# Parent %s\n" % hex(parents[1]))
1074 1078
1075 1079 for headerid in extraexport:
1076 1080 header = extraexportmap[headerid](seqno, ctx)
1077 1081 if header is not None:
1078 1082 write('# %s\n' % header)
1079 1083 write(ctx.description().rstrip())
1080 1084 write("\n\n")
1081 1085
1082 1086 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1083 1087 write(chunk, label=label)
1084 1088
1085 1089 if shouldclose:
1086 1090 fp.close()
1087 1091
1088 1092 for seqno, rev in enumerate(revs):
1089 1093 single(rev, seqno + 1, fp)
1090 1094
1091 1095 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1092 1096 changes=None, stat=False, fp=None, prefix='',
1093 1097 root='', listsubrepos=False):
1094 1098 '''show diff or diffstat.'''
1095 1099 if fp is None:
1096 1100 write = ui.write
1097 1101 else:
1098 1102 def write(s, **kw):
1099 1103 fp.write(s)
1100 1104
1101 1105 if root:
1102 1106 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1103 1107 else:
1104 1108 relroot = ''
1105 1109 if relroot != '':
1106 1110 # XXX relative roots currently don't work if the root is within a
1107 1111 # subrepo
1108 1112 uirelroot = match.uipath(relroot)
1109 1113 relroot += '/'
1110 1114 for matchroot in match.files():
1111 1115 if not matchroot.startswith(relroot):
1112 1116 ui.warn(_('warning: %s not inside relative root %s\n') % (
1113 1117 match.uipath(matchroot), uirelroot))
1114 1118
1115 1119 if stat:
1116 1120 diffopts = diffopts.copy(context=0)
1117 1121 width = 80
1118 1122 if not ui.plain():
1119 1123 width = ui.termwidth()
1120 1124 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1121 1125 prefix=prefix, relroot=relroot)
1122 1126 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1123 1127 width=width,
1124 1128 git=diffopts.git):
1125 1129 write(chunk, label=label)
1126 1130 else:
1127 1131 for chunk, label in patch.diffui(repo, node1, node2, match,
1128 1132 changes, diffopts, prefix=prefix,
1129 1133 relroot=relroot):
1130 1134 write(chunk, label=label)
1131 1135
1132 1136 if listsubrepos:
1133 1137 ctx1 = repo[node1]
1134 1138 ctx2 = repo[node2]
1135 1139 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1136 1140 tempnode2 = node2
1137 1141 try:
1138 1142 if node2 is not None:
1139 1143 tempnode2 = ctx2.substate[subpath][1]
1140 1144 except KeyError:
1141 1145 # A subrepo that existed in node1 was deleted between node1 and
1142 1146 # node2 (inclusive). Thus, ctx2's substate won't contain that
1143 1147 # subpath. The best we can do is to ignore it.
1144 1148 tempnode2 = None
1145 1149 submatch = matchmod.subdirmatcher(subpath, match)
1146 1150 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1147 1151 stat=stat, fp=fp, prefix=prefix)
1148 1152
1149 1153 class changeset_printer(object):
1150 1154 '''show changeset information when templating not requested.'''
1151 1155
1152 1156 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1153 1157 self.ui = ui
1154 1158 self.repo = repo
1155 1159 self.buffered = buffered
1156 1160 self.matchfn = matchfn
1157 1161 self.diffopts = diffopts
1158 1162 self.header = {}
1159 1163 self.hunk = {}
1160 1164 self.lastheader = None
1161 1165 self.footer = None
1162 1166
1163 1167 def flush(self, ctx):
1164 1168 rev = ctx.rev()
1165 1169 if rev in self.header:
1166 1170 h = self.header[rev]
1167 1171 if h != self.lastheader:
1168 1172 self.lastheader = h
1169 1173 self.ui.write(h)
1170 1174 del self.header[rev]
1171 1175 if rev in self.hunk:
1172 1176 self.ui.write(self.hunk[rev])
1173 1177 del self.hunk[rev]
1174 1178 return 1
1175 1179 return 0
1176 1180
1177 1181 def close(self):
1178 1182 if self.footer:
1179 1183 self.ui.write(self.footer)
1180 1184
1181 1185 def show(self, ctx, copies=None, matchfn=None, **props):
1182 1186 if self.buffered:
1183 1187 self.ui.pushbuffer(labeled=True)
1184 1188 self._show(ctx, copies, matchfn, props)
1185 1189 self.hunk[ctx.rev()] = self.ui.popbuffer()
1186 1190 else:
1187 1191 self._show(ctx, copies, matchfn, props)
1188 1192
1189 1193 def _show(self, ctx, copies, matchfn, props):
1190 1194 '''show a single changeset or file revision'''
1191 1195 changenode = ctx.node()
1192 1196 rev = ctx.rev()
1193 1197 if self.ui.debugflag:
1194 1198 hexfunc = hex
1195 1199 else:
1196 1200 hexfunc = short
1197 1201 # as of now, wctx.node() and wctx.rev() return None, but we want to
1198 1202 # show the same values as {node} and {rev} templatekw
1199 1203 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1200 1204
1201 1205 if self.ui.quiet:
1202 1206 self.ui.write("%d:%s\n" % revnode, label='log.node')
1203 1207 return
1204 1208
1205 1209 date = util.datestr(ctx.date())
1206 1210
1207 1211 # i18n: column positioning for "hg log"
1208 1212 self.ui.write(_("changeset: %d:%s\n") % revnode,
1209 1213 label='log.changeset changeset.%s' % ctx.phasestr())
1210 1214
1211 1215 # branches are shown first before any other names due to backwards
1212 1216 # compatibility
1213 1217 branch = ctx.branch()
1214 1218 # don't show the default branch name
1215 1219 if branch != 'default':
1216 1220 # i18n: column positioning for "hg log"
1217 1221 self.ui.write(_("branch: %s\n") % branch,
1218 1222 label='log.branch')
1219 1223
1220 1224 for name, ns in self.repo.names.iteritems():
1221 1225 # branches has special logic already handled above, so here we just
1222 1226 # skip it
1223 1227 if name == 'branches':
1224 1228 continue
1225 1229 # we will use the templatename as the color name since those two
1226 1230 # should be the same
1227 1231 for name in ns.names(self.repo, changenode):
1228 1232 self.ui.write(ns.logfmt % name,
1229 1233 label='log.%s' % ns.colorname)
1230 1234 if self.ui.debugflag:
1231 1235 # i18n: column positioning for "hg log"
1232 1236 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1233 1237 label='log.phase')
1234 1238 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1235 1239 label = 'log.parent changeset.%s' % pctx.phasestr()
1236 1240 # i18n: column positioning for "hg log"
1237 1241 self.ui.write(_("parent: %d:%s\n")
1238 1242 % (pctx.rev(), hexfunc(pctx.node())),
1239 1243 label=label)
1240 1244
1241 1245 if self.ui.debugflag and rev is not None:
1242 1246 mnode = ctx.manifestnode()
1243 1247 # i18n: column positioning for "hg log"
1244 1248 self.ui.write(_("manifest: %d:%s\n") %
1245 1249 (self.repo.manifest.rev(mnode), hex(mnode)),
1246 1250 label='ui.debug log.manifest')
1247 1251 # i18n: column positioning for "hg log"
1248 1252 self.ui.write(_("user: %s\n") % ctx.user(),
1249 1253 label='log.user')
1250 1254 # i18n: column positioning for "hg log"
1251 1255 self.ui.write(_("date: %s\n") % date,
1252 1256 label='log.date')
1253 1257
1254 1258 if self.ui.debugflag:
1255 1259 files = ctx.p1().status(ctx)[:3]
1256 1260 for key, value in zip([# i18n: column positioning for "hg log"
1257 1261 _("files:"),
1258 1262 # i18n: column positioning for "hg log"
1259 1263 _("files+:"),
1260 1264 # i18n: column positioning for "hg log"
1261 1265 _("files-:")], files):
1262 1266 if value:
1263 1267 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1264 1268 label='ui.debug log.files')
1265 1269 elif ctx.files() and self.ui.verbose:
1266 1270 # i18n: column positioning for "hg log"
1267 1271 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1268 1272 label='ui.note log.files')
1269 1273 if copies and self.ui.verbose:
1270 1274 copies = ['%s (%s)' % c for c in copies]
1271 1275 # i18n: column positioning for "hg log"
1272 1276 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1273 1277 label='ui.note log.copies')
1274 1278
1275 1279 extra = ctx.extra()
1276 1280 if extra and self.ui.debugflag:
1277 1281 for key, value in sorted(extra.items()):
1278 1282 # i18n: column positioning for "hg log"
1279 1283 self.ui.write(_("extra: %s=%s\n")
1280 1284 % (key, value.encode('string_escape')),
1281 1285 label='ui.debug log.extra')
1282 1286
1283 1287 description = ctx.description().strip()
1284 1288 if description:
1285 1289 if self.ui.verbose:
1286 1290 self.ui.write(_("description:\n"),
1287 1291 label='ui.note log.description')
1288 1292 self.ui.write(description,
1289 1293 label='ui.note log.description')
1290 1294 self.ui.write("\n\n")
1291 1295 else:
1292 1296 # i18n: column positioning for "hg log"
1293 1297 self.ui.write(_("summary: %s\n") %
1294 1298 description.splitlines()[0],
1295 1299 label='log.summary')
1296 1300 self.ui.write("\n")
1297 1301
1298 1302 self.showpatch(ctx, matchfn)
1299 1303
1300 1304 def showpatch(self, ctx, matchfn):
1301 1305 if not matchfn:
1302 1306 matchfn = self.matchfn
1303 1307 if matchfn:
1304 1308 stat = self.diffopts.get('stat')
1305 1309 diff = self.diffopts.get('patch')
1306 1310 diffopts = patch.diffallopts(self.ui, self.diffopts)
1307 1311 node = ctx.node()
1308 1312 prev = ctx.p1().node()
1309 1313 if stat:
1310 1314 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1311 1315 match=matchfn, stat=True)
1312 1316 if diff:
1313 1317 if stat:
1314 1318 self.ui.write("\n")
1315 1319 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1316 1320 match=matchfn, stat=False)
1317 1321 self.ui.write("\n")
1318 1322
1319 1323 class jsonchangeset(changeset_printer):
1320 1324 '''format changeset information.'''
1321 1325
1322 1326 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1323 1327 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1324 1328 self.cache = {}
1325 1329 self._first = True
1326 1330
1327 1331 def close(self):
1328 1332 if not self._first:
1329 1333 self.ui.write("\n]\n")
1330 1334 else:
1331 1335 self.ui.write("[]\n")
1332 1336
1333 1337 def _show(self, ctx, copies, matchfn, props):
1334 1338 '''show a single changeset or file revision'''
1335 1339 rev = ctx.rev()
1336 1340 if rev is None:
1337 1341 jrev = jnode = 'null'
1338 1342 else:
1339 1343 jrev = str(rev)
1340 1344 jnode = '"%s"' % hex(ctx.node())
1341 1345 j = encoding.jsonescape
1342 1346
1343 1347 if self._first:
1344 1348 self.ui.write("[\n {")
1345 1349 self._first = False
1346 1350 else:
1347 1351 self.ui.write(",\n {")
1348 1352
1349 1353 if self.ui.quiet:
1350 1354 self.ui.write('\n "rev": %s' % jrev)
1351 1355 self.ui.write(',\n "node": %s' % jnode)
1352 1356 self.ui.write('\n }')
1353 1357 return
1354 1358
1355 1359 self.ui.write('\n "rev": %s' % jrev)
1356 1360 self.ui.write(',\n "node": %s' % jnode)
1357 1361 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1358 1362 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1359 1363 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1360 1364 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1361 1365 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1362 1366
1363 1367 self.ui.write(',\n "bookmarks": [%s]' %
1364 1368 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1365 1369 self.ui.write(',\n "tags": [%s]' %
1366 1370 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1367 1371 self.ui.write(',\n "parents": [%s]' %
1368 1372 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1369 1373
1370 1374 if self.ui.debugflag:
1371 1375 if rev is None:
1372 1376 jmanifestnode = 'null'
1373 1377 else:
1374 1378 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1375 1379 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1376 1380
1377 1381 self.ui.write(',\n "extra": {%s}' %
1378 1382 ", ".join('"%s": "%s"' % (j(k), j(v))
1379 1383 for k, v in ctx.extra().items()))
1380 1384
1381 1385 files = ctx.p1().status(ctx)
1382 1386 self.ui.write(',\n "modified": [%s]' %
1383 1387 ", ".join('"%s"' % j(f) for f in files[0]))
1384 1388 self.ui.write(',\n "added": [%s]' %
1385 1389 ", ".join('"%s"' % j(f) for f in files[1]))
1386 1390 self.ui.write(',\n "removed": [%s]' %
1387 1391 ", ".join('"%s"' % j(f) for f in files[2]))
1388 1392
1389 1393 elif self.ui.verbose:
1390 1394 self.ui.write(',\n "files": [%s]' %
1391 1395 ", ".join('"%s"' % j(f) for f in ctx.files()))
1392 1396
1393 1397 if copies:
1394 1398 self.ui.write(',\n "copies": {%s}' %
1395 1399 ", ".join('"%s": "%s"' % (j(k), j(v))
1396 1400 for k, v in copies))
1397 1401
1398 1402 matchfn = self.matchfn
1399 1403 if matchfn:
1400 1404 stat = self.diffopts.get('stat')
1401 1405 diff = self.diffopts.get('patch')
1402 1406 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1403 1407 node, prev = ctx.node(), ctx.p1().node()
1404 1408 if stat:
1405 1409 self.ui.pushbuffer()
1406 1410 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1407 1411 match=matchfn, stat=True)
1408 1412 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1409 1413 if diff:
1410 1414 self.ui.pushbuffer()
1411 1415 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1412 1416 match=matchfn, stat=False)
1413 1417 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1414 1418
1415 1419 self.ui.write("\n }")
1416 1420
1417 1421 class changeset_templater(changeset_printer):
1418 1422 '''format changeset information.'''
1419 1423
1420 1424 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1421 1425 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1422 1426 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1423 1427 defaulttempl = {
1424 1428 'parent': '{rev}:{node|formatnode} ',
1425 1429 'manifest': '{rev}:{node|formatnode}',
1426 1430 'file_copy': '{name} ({source})',
1427 1431 'extra': '{key}={value|stringescape}'
1428 1432 }
1429 1433 # filecopy is preserved for compatibility reasons
1430 1434 defaulttempl['filecopy'] = defaulttempl['file_copy']
1431 1435 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1432 1436 cache=defaulttempl)
1433 1437 if tmpl:
1434 1438 self.t.cache['changeset'] = tmpl
1435 1439
1436 1440 self.cache = {}
1437 1441
1438 1442 # find correct templates for current mode
1439 1443 tmplmodes = [
1440 1444 (True, None),
1441 1445 (self.ui.verbose, 'verbose'),
1442 1446 (self.ui.quiet, 'quiet'),
1443 1447 (self.ui.debugflag, 'debug'),
1444 1448 ]
1445 1449
1446 1450 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1447 1451 'docheader': '', 'docfooter': ''}
1448 1452 for mode, postfix in tmplmodes:
1449 1453 for t in self._parts:
1450 1454 cur = t
1451 1455 if postfix:
1452 1456 cur += "_" + postfix
1453 1457 if mode and cur in self.t:
1454 1458 self._parts[t] = cur
1455 1459
1456 1460 if self._parts['docheader']:
1457 1461 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1458 1462
1459 1463 def close(self):
1460 1464 if self._parts['docfooter']:
1461 1465 if not self.footer:
1462 1466 self.footer = ""
1463 1467 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1464 1468 return super(changeset_templater, self).close()
1465 1469
1466 1470 def _show(self, ctx, copies, matchfn, props):
1467 1471 '''show a single changeset or file revision'''
1468 1472 props = props.copy()
1469 1473 props.update(templatekw.keywords)
1470 1474 props['templ'] = self.t
1471 1475 props['ctx'] = ctx
1472 1476 props['repo'] = self.repo
1473 1477 props['revcache'] = {'copies': copies}
1474 1478 props['cache'] = self.cache
1475 1479
1476 1480 try:
1477 1481 # write header
1478 1482 if self._parts['header']:
1479 1483 h = templater.stringify(self.t(self._parts['header'], **props))
1480 1484 if self.buffered:
1481 1485 self.header[ctx.rev()] = h
1482 1486 else:
1483 1487 if self.lastheader != h:
1484 1488 self.lastheader = h
1485 1489 self.ui.write(h)
1486 1490
1487 1491 # write changeset metadata, then patch if requested
1488 1492 key = self._parts['changeset']
1489 1493 self.ui.write(templater.stringify(self.t(key, **props)))
1490 1494 self.showpatch(ctx, matchfn)
1491 1495
1492 1496 if self._parts['footer']:
1493 1497 if not self.footer:
1494 1498 self.footer = templater.stringify(
1495 1499 self.t(self._parts['footer'], **props))
1496 1500 except KeyError as inst:
1497 1501 msg = _("%s: no key named '%s'")
1498 1502 raise error.Abort(msg % (self.t.mapfile, inst.args[0]))
1499 1503 except SyntaxError as inst:
1500 1504 raise error.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1501 1505
1502 1506 def gettemplate(ui, tmpl, style):
1503 1507 """
1504 1508 Find the template matching the given template spec or style.
1505 1509 """
1506 1510
1507 1511 # ui settings
1508 1512 if not tmpl and not style: # template are stronger than style
1509 1513 tmpl = ui.config('ui', 'logtemplate')
1510 1514 if tmpl:
1511 1515 try:
1512 1516 tmpl = templater.unquotestring(tmpl)
1513 1517 except SyntaxError:
1514 1518 pass
1515 1519 return tmpl, None
1516 1520 else:
1517 1521 style = util.expandpath(ui.config('ui', 'style', ''))
1518 1522
1519 1523 if not tmpl and style:
1520 1524 mapfile = style
1521 1525 if not os.path.split(mapfile)[0]:
1522 1526 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1523 1527 or templater.templatepath(mapfile))
1524 1528 if mapname:
1525 1529 mapfile = mapname
1526 1530 return None, mapfile
1527 1531
1528 1532 if not tmpl:
1529 1533 return None, None
1530 1534
1531 1535 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1532 1536
1533 1537 def show_changeset(ui, repo, opts, buffered=False):
1534 1538 """show one changeset using template or regular display.
1535 1539
1536 1540 Display format will be the first non-empty hit of:
1537 1541 1. option 'template'
1538 1542 2. option 'style'
1539 1543 3. [ui] setting 'logtemplate'
1540 1544 4. [ui] setting 'style'
1541 1545 If all of these values are either the unset or the empty string,
1542 1546 regular display via changeset_printer() is done.
1543 1547 """
1544 1548 # options
1545 1549 matchfn = None
1546 1550 if opts.get('patch') or opts.get('stat'):
1547 1551 matchfn = scmutil.matchall(repo)
1548 1552
1549 1553 if opts.get('template') == 'json':
1550 1554 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1551 1555
1552 1556 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1553 1557
1554 1558 if not tmpl and not mapfile:
1555 1559 return changeset_printer(ui, repo, matchfn, opts, buffered)
1556 1560
1557 1561 try:
1558 1562 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1559 1563 buffered)
1560 1564 except SyntaxError as inst:
1561 1565 raise error.Abort(inst.args[0])
1562 1566 return t
1563 1567
1564 1568 def showmarker(ui, marker):
1565 1569 """utility function to display obsolescence marker in a readable way
1566 1570
1567 1571 To be used by debug function."""
1568 1572 ui.write(hex(marker.precnode()))
1569 1573 for repl in marker.succnodes():
1570 1574 ui.write(' ')
1571 1575 ui.write(hex(repl))
1572 1576 ui.write(' %X ' % marker.flags())
1573 1577 parents = marker.parentnodes()
1574 1578 if parents is not None:
1575 1579 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1576 1580 ui.write('(%s) ' % util.datestr(marker.date()))
1577 1581 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1578 1582 sorted(marker.metadata().items())
1579 1583 if t[0] != 'date')))
1580 1584 ui.write('\n')
1581 1585
1582 1586 def finddate(ui, repo, date):
1583 1587 """Find the tipmost changeset that matches the given date spec"""
1584 1588
1585 1589 df = util.matchdate(date)
1586 1590 m = scmutil.matchall(repo)
1587 1591 results = {}
1588 1592
1589 1593 def prep(ctx, fns):
1590 1594 d = ctx.date()
1591 1595 if df(d[0]):
1592 1596 results[ctx.rev()] = d
1593 1597
1594 1598 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1595 1599 rev = ctx.rev()
1596 1600 if rev in results:
1597 1601 ui.status(_("found revision %s from %s\n") %
1598 1602 (rev, util.datestr(results[rev])))
1599 1603 return str(rev)
1600 1604
1601 1605 raise error.Abort(_("revision matching date not found"))
1602 1606
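# Illustrative note (not part of the original file): finddate() accepts the
# usual date specs understood by util.matchdate(), for example a single day
# or an inclusive range, and reports the tipmost matching revision.
#
#     finddate(ui, repo, '2011-02-01')                  # one day
#     finddate(ui, repo, '2011-02-01 to 2011-03-01')    # a range
#     # each call returns the matching revision number as a string, or
#     # aborts when no revision matches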
1603 1607 def increasingwindows(windowsize=8, sizelimit=512):
1604 1608 while True:
1605 1609 yield windowsize
1606 1610 if windowsize < sizelimit:
1607 1611 windowsize *= 2
1608 1612
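# Illustrative sketch (not part of the original file): the window sizes
# yielded by increasingwindows() double from 8 up to the 512 cap and then
# repeat 512 forever.
#
#     gen = increasingwindows()
#     [next(gen) for _ in xrange(9)]
#     # -> [8, 16, 32, 64, 128, 256, 512, 512, 512]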
1609 1613 class FileWalkError(Exception):
1610 1614 pass
1611 1615
1612 1616 def walkfilerevs(repo, match, follow, revs, fncache):
1613 1617 '''Walks the file history for the matched files.
1614 1618
1615 1619 Returns the changeset revs that are involved in the file history.
1616 1620
1617 1621 Throws FileWalkError if the file history can't be walked using
1618 1622 filelogs alone.
1619 1623 '''
1620 1624 wanted = set()
1621 1625 copies = []
1622 1626 minrev, maxrev = min(revs), max(revs)
1623 1627 def filerevgen(filelog, last):
1624 1628 """
1625 1629 Only files, no patterns. Check the history of each file.
1626 1630
1627 1631 Examines filelog entries within minrev, maxrev linkrev range
1628 1632 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1629 1633 tuples in backwards order
1630 1634 """
1631 1635 cl_count = len(repo)
1632 1636 revs = []
1633 1637 for j in xrange(0, last + 1):
1634 1638 linkrev = filelog.linkrev(j)
1635 1639 if linkrev < minrev:
1636 1640 continue
1637 1641 # only yield revs for which we have the changelog; this can
1638 1642 # happen while doing "hg log" during a pull or commit
1639 1643 if linkrev >= cl_count:
1640 1644 break
1641 1645
1642 1646 parentlinkrevs = []
1643 1647 for p in filelog.parentrevs(j):
1644 1648 if p != nullrev:
1645 1649 parentlinkrevs.append(filelog.linkrev(p))
1646 1650 n = filelog.node(j)
1647 1651 revs.append((linkrev, parentlinkrevs,
1648 1652 follow and filelog.renamed(n)))
1649 1653
1650 1654 return reversed(revs)
1651 1655 def iterfiles():
1652 1656 pctx = repo['.']
1653 1657 for filename in match.files():
1654 1658 if follow:
1655 1659 if filename not in pctx:
1656 1660 raise error.Abort(_('cannot follow file not in parent '
1657 1661 'revision: "%s"') % filename)
1658 1662 yield filename, pctx[filename].filenode()
1659 1663 else:
1660 1664 yield filename, None
1661 1665 for filename_node in copies:
1662 1666 yield filename_node
1663 1667
1664 1668 for file_, node in iterfiles():
1665 1669 filelog = repo.file(file_)
1666 1670 if not len(filelog):
1667 1671 if node is None:
1668 1672 # A zero count may be a directory or deleted file, so
1669 1673 # try to find matching entries on the slow path.
1670 1674 if follow:
1671 1675 raise error.Abort(
1672 1676 _('cannot follow nonexistent file: "%s"') % file_)
1673 1677 raise FileWalkError("Cannot walk via filelog")
1674 1678 else:
1675 1679 continue
1676 1680
1677 1681 if node is None:
1678 1682 last = len(filelog) - 1
1679 1683 else:
1680 1684 last = filelog.rev(node)
1681 1685
1682 1686 # keep track of all ancestors of the file
1683 1687 ancestors = set([filelog.linkrev(last)])
1684 1688
1685 1689 # iterate from latest to oldest revision
1686 1690 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1687 1691 if not follow:
1688 1692 if rev > maxrev:
1689 1693 continue
1690 1694 else:
1691 1695 # Note that last might not be the first interesting
1692 1696 # rev to us:
1693 1697 # if the file has been changed after maxrev, we'll
1694 1698 # have linkrev(last) > maxrev, and we still need
1695 1699 # to explore the file graph
1696 1700 if rev not in ancestors:
1697 1701 continue
1698 1702 # XXX insert 1327 fix here
1699 1703 if flparentlinkrevs:
1700 1704 ancestors.update(flparentlinkrevs)
1701 1705
1702 1706 fncache.setdefault(rev, []).append(file_)
1703 1707 wanted.add(rev)
1704 1708 if copied:
1705 1709 copies.append(copied)
1706 1710
1707 1711 return wanted
1708 1712
1709 1713 class _followfilter(object):
1710 1714 def __init__(self, repo, onlyfirst=False):
1711 1715 self.repo = repo
1712 1716 self.startrev = nullrev
1713 1717 self.roots = set()
1714 1718 self.onlyfirst = onlyfirst
1715 1719
1716 1720 def match(self, rev):
1717 1721 def realparents(rev):
1718 1722 if self.onlyfirst:
1719 1723 return self.repo.changelog.parentrevs(rev)[0:1]
1720 1724 else:
1721 1725 return filter(lambda x: x != nullrev,
1722 1726 self.repo.changelog.parentrevs(rev))
1723 1727
1724 1728 if self.startrev == nullrev:
1725 1729 self.startrev = rev
1726 1730 return True
1727 1731
1728 1732 if rev > self.startrev:
1729 1733 # forward: all descendants
1730 1734 if not self.roots:
1731 1735 self.roots.add(self.startrev)
1732 1736 for parent in realparents(rev):
1733 1737 if parent in self.roots:
1734 1738 self.roots.add(rev)
1735 1739 return True
1736 1740 else:
1737 1741 # backwards: all parents
1738 1742 if not self.roots:
1739 1743 self.roots.update(realparents(self.startrev))
1740 1744 if rev in self.roots:
1741 1745 self.roots.remove(rev)
1742 1746 self.roots.update(realparents(rev))
1743 1747 return True
1744 1748
1745 1749 return False
1746 1750
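# Illustrative sketch (not part of the original file; the prune handling in
# walkchangerevs() below is a real caller): when revisions are fed to
# _followfilter.match() from newest to oldest, the filter accepts the first
# revision it sees and then only that revision's ancestors.
#
#     ff = _followfilter(repo)
#     kept = [r for r in repo.revs('reverse(all())') if ff.match(r)]
#     # kept == the tip revision plus its ancestors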
1747 1751 def walkchangerevs(repo, match, opts, prepare):
1748 1752 '''Iterate over files and the revs in which they changed.
1749 1753
1750 1754 Callers most commonly need to iterate backwards over the history
1751 1755 in which they are interested. Doing so has awful (quadratic-looking)
1752 1756 performance, so we use iterators in a "windowed" way.
1753 1757
1754 1758 We walk a window of revisions in the desired order. Within the
1755 1759 window, we first walk forwards to gather data, then in the desired
1756 1760 order (usually backwards) to display it.
1757 1761
1758 1762 This function returns an iterator yielding contexts. Before
1759 1763 yielding each context, the iterator will first call the prepare
1760 1764 function on each context in the window in forward order.'''
1761 1765
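# Usage sketch (illustrative, not part of the original file; finddate()
# above is a real caller): gather data in the forward pass via 'prepare',
# then consume the contexts in the requested (usually reverse) order.
#
#     m = scmutil.matchall(repo)
#     def prepare(ctx, fns):
#         ...   # inspect ctx and the matched file names, forward order
#     for ctx in walkchangerevs(repo, m, {'rev': None}, prepare):
#         ...   # display ctx, typically newest first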
1762 1766 follow = opts.get('follow') or opts.get('follow_first')
1763 1767 revs = _logrevs(repo, opts)
1764 1768 if not revs:
1765 1769 return []
1766 1770 wanted = set()
1767 1771 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1768 1772 opts.get('removed'))
1769 1773 fncache = {}
1770 1774 change = repo.changectx
1771 1775
1772 1776 # First step is to fill wanted, the set of revisions that we want to yield.
1773 1777 # When it does not induce extra cost, we also fill fncache for revisions in
1774 1778 # wanted: a cache of filenames that were changed (ctx.files()) and that
1775 1779 # match the file filtering conditions.
1776 1780
1777 1781 if match.always():
1778 1782 # No files, no patterns. Display all revs.
1779 1783 wanted = revs
1780 1784 elif not slowpath:
1781 1785 # We only have to read through the filelog to find wanted revisions
1782 1786
1783 1787 try:
1784 1788 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1785 1789 except FileWalkError:
1786 1790 slowpath = True
1787 1791
1788 1792 # We decided to fall back to the slowpath because at least one
1789 1793 # of the paths was not a file. Check to see if at least one of them
1790 1794 # existed in history, otherwise simply return
1791 1795 for path in match.files():
1792 1796 if path == '.' or path in repo.store:
1793 1797 break
1794 1798 else:
1795 1799 return []
1796 1800
1797 1801 if slowpath:
1798 1802 # We have to read the changelog to match filenames against
1799 1803 # changed files
1800 1804
1801 1805 if follow:
1802 1806 raise error.Abort(_('can only follow copies/renames for explicit '
1803 1807 'filenames'))
1804 1808
1805 1809 # The slow path checks files modified in every changeset.
1806 1810 # This is really slow on large repos, so compute the set lazily.
1807 1811 class lazywantedset(object):
1808 1812 def __init__(self):
1809 1813 self.set = set()
1810 1814 self.revs = set(revs)
1811 1815
1812 1816 # No need to worry about locality here because it will be accessed
1813 1817 # in the same order as the increasing window below.
1814 1818 def __contains__(self, value):
1815 1819 if value in self.set:
1816 1820 return True
1817 1821 elif not value in self.revs:
1818 1822 return False
1819 1823 else:
1820 1824 self.revs.discard(value)
1821 1825 ctx = change(value)
1822 1826 matches = filter(match, ctx.files())
1823 1827 if matches:
1824 1828 fncache[value] = matches
1825 1829 self.set.add(value)
1826 1830 return True
1827 1831 return False
1828 1832
1829 1833 def discard(self, value):
1830 1834 self.revs.discard(value)
1831 1835 self.set.discard(value)
1832 1836
1833 1837 wanted = lazywantedset()
1834 1838
1835 1839 # it might be worthwhile to do this in the iterator if the rev range
1836 1840 # is descending and the prune args are all within that range
1837 1841 for rev in opts.get('prune', ()):
1838 1842 rev = repo[rev].rev()
1839 1843 ff = _followfilter(repo)
1840 1844 stop = min(revs[0], revs[-1])
1841 1845 for x in xrange(rev, stop - 1, -1):
1842 1846 if ff.match(x):
1843 1847 wanted = wanted - [x]
1844 1848
1845 1849 # Now that wanted is correctly initialized, we can iterate over the
1846 1850 # revision range, yielding only revisions in wanted.
1847 1851 def iterate():
1848 1852 if follow and match.always():
1849 1853 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1850 1854 def want(rev):
1851 1855 return ff.match(rev) and rev in wanted
1852 1856 else:
1853 1857 def want(rev):
1854 1858 return rev in wanted
1855 1859
1856 1860 it = iter(revs)
1857 1861 stopiteration = False
1858 1862 for windowsize in increasingwindows():
1859 1863 nrevs = []
1860 1864 for i in xrange(windowsize):
1861 1865 rev = next(it, None)
1862 1866 if rev is None:
1863 1867 stopiteration = True
1864 1868 break
1865 1869 elif want(rev):
1866 1870 nrevs.append(rev)
1867 1871 for rev in sorted(nrevs):
1868 1872 fns = fncache.get(rev)
1869 1873 ctx = change(rev)
1870 1874 if not fns:
1871 1875 def fns_generator():
1872 1876 for f in ctx.files():
1873 1877 if match(f):
1874 1878 yield f
1875 1879 fns = fns_generator()
1876 1880 prepare(ctx, fns)
1877 1881 for rev in nrevs:
1878 1882 yield change(rev)
1879 1883
1880 1884 if stopiteration:
1881 1885 break
1882 1886
1883 1887 return iterate()
1884 1888
1885 1889 def _makefollowlogfilematcher(repo, files, followfirst):
1886 1890 # When displaying a revision with --patch --follow FILE, we have
1887 1891 # to know which file of the revision must be diffed. With
1888 1892 # --follow, we want the names of the ancestors of FILE in the
1889 1893 # revision, stored in "fcache". "fcache" is populated by
1890 1894 # reproducing the graph traversal already done by --follow revset
1891 1895 # and relating linkrevs to file names (which is not "correct" but
1892 1896 # good enough).
1893 1897 fcache = {}
1894 1898 fcacheready = [False]
1895 1899 pctx = repo['.']
1896 1900
1897 1901 def populate():
1898 1902 for fn in files:
1899 1903 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1900 1904 for c in i:
1901 1905 fcache.setdefault(c.linkrev(), set()).add(c.path())
1902 1906
1903 1907 def filematcher(rev):
1904 1908 if not fcacheready[0]:
1905 1909 # Lazy initialization
1906 1910 fcacheready[0] = True
1907 1911 populate()
1908 1912 return scmutil.matchfiles(repo, fcache.get(rev, []))
1909 1913
1910 1914 return filematcher
1911 1915
1912 1916 def _makenofollowlogfilematcher(repo, pats, opts):
1913 1917 '''hook for extensions to override the filematcher for non-follow cases'''
1914 1918 return None
1915 1919
1916 1920 def _makelogrevset(repo, pats, opts, revs):
1917 1921 """Return (expr, filematcher) where expr is a revset string built
1918 1922 from log options and file patterns or None. If --stat or --patch
1919 1923 are not passed filematcher is None. Otherwise it is a callable
1920 1924 taking a revision number and returning a match objects filtering
1921 1925 the files to be detailed when displaying the revision.
1922 1926 """
1923 1927 opt2revset = {
1924 1928 'no_merges': ('not merge()', None),
1925 1929 'only_merges': ('merge()', None),
1926 1930 '_ancestors': ('ancestors(%(val)s)', None),
1927 1931 '_fancestors': ('_firstancestors(%(val)s)', None),
1928 1932 '_descendants': ('descendants(%(val)s)', None),
1929 1933 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1930 1934 '_matchfiles': ('_matchfiles(%(val)s)', None),
1931 1935 'date': ('date(%(val)r)', None),
1932 1936 'branch': ('branch(%(val)r)', ' or '),
1933 1937 '_patslog': ('filelog(%(val)r)', ' or '),
1934 1938 '_patsfollow': ('follow(%(val)r)', ' or '),
1935 1939 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1936 1940 'keyword': ('keyword(%(val)r)', ' or '),
1937 1941 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1938 1942 'user': ('user(%(val)r)', ' or '),
1939 1943 }
1940 1944
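# Illustrative sketch (not part of the original file): how the mapping above
# becomes a revset through the loop at the end of this function. For a
# hypothetical "hg log -b default -k bug -u alice" with no file patterns,
# the populated options are roughly:
#
#     {'branch': ['default'], 'keyword': ['bug'], 'user': ['alice']}
#
# and the returned expression is roughly:
#
#     "((branch('default')) and (keyword('bug')) and (user('alice')))"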
1941 1945 opts = dict(opts)
1942 1946 # follow or not follow?
1943 1947 follow = opts.get('follow') or opts.get('follow_first')
1944 1948 if opts.get('follow_first'):
1945 1949 followfirst = 1
1946 1950 else:
1947 1951 followfirst = 0
1948 1952 # --follow with FILE behavior depends on revs...
1949 1953 it = iter(revs)
1950 1954 startrev = next(it)
1951 1955 followdescendants = startrev < next(it, startrev)
1952 1956
1953 1957 # branch and only_branch are really aliases and must be handled at
1954 1958 # the same time
1955 1959 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1956 1960 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1957 1961 # pats/include/exclude are passed to match.match() directly in
1958 1962 # _matchfiles() revset but walkchangerevs() builds its matcher with
1959 1963 # scmutil.match(). The difference is input pats are globbed on
1960 1964 # platforms without shell expansion (windows).
1961 1965 wctx = repo[None]
1962 1966 match, pats = scmutil.matchandpats(wctx, pats, opts)
1963 1967 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1964 1968 opts.get('removed'))
1965 1969 if not slowpath:
1966 1970 for f in match.files():
1967 1971 if follow and f not in wctx:
1968 1972 # If the file exists, it may be a directory, so let it
1969 1973 # take the slow path.
1970 1974 if os.path.exists(repo.wjoin(f)):
1971 1975 slowpath = True
1972 1976 continue
1973 1977 else:
1974 1978 raise error.Abort(_('cannot follow file not in parent '
1975 1979 'revision: "%s"') % f)
1976 1980 filelog = repo.file(f)
1977 1981 if not filelog:
1978 1982 # A zero count may be a directory or deleted file, so
1979 1983 # try to find matching entries on the slow path.
1980 1984 if follow:
1981 1985 raise error.Abort(
1982 1986 _('cannot follow nonexistent file: "%s"') % f)
1983 1987 slowpath = True
1984 1988
1985 1989 # We decided to fall back to the slowpath because at least one
1986 1990 # of the paths was not a file. Check to see if at least one of them
1987 1991 # existed in history - in that case, we'll continue down the
1988 1992 # slowpath; otherwise, we can turn off the slowpath
1989 1993 if slowpath:
1990 1994 for path in match.files():
1991 1995 if path == '.' or path in repo.store:
1992 1996 break
1993 1997 else:
1994 1998 slowpath = False
1995 1999
1996 2000 fpats = ('_patsfollow', '_patsfollowfirst')
1997 2001 fnopats = (('_ancestors', '_fancestors'),
1998 2002 ('_descendants', '_fdescendants'))
1999 2003 if slowpath:
2000 2004 # See walkchangerevs() slow path.
2001 2005 #
2002 2006 # pats/include/exclude cannot be represented as separate
2003 2007 # revset expressions as their filtering logic applies at file
2004 2008 # level. For instance "-I a -X a" matches a revision touching
2005 2009 # "a" and "b" while "file(a) and not file(b)" does
2006 2010 # not. Besides, filesets are evaluated against the working
2007 2011 # directory.
2008 2012 matchargs = ['r:', 'd:relpath']
2009 2013 for p in pats:
2010 2014 matchargs.append('p:' + p)
2011 2015 for p in opts.get('include', []):
2012 2016 matchargs.append('i:' + p)
2013 2017 for p in opts.get('exclude', []):
2014 2018 matchargs.append('x:' + p)
2015 2019 matchargs = ','.join(('%r' % p) for p in matchargs)
2016 2020 opts['_matchfiles'] = matchargs
2017 2021 if follow:
2018 2022 opts[fnopats[0][followfirst]] = '.'
2019 2023 else:
2020 2024 if follow:
2021 2025 if pats:
2022 2026 # follow() revset interprets its file argument as a
2023 2027 # manifest entry, so use match.files(), not pats.
2024 2028 opts[fpats[followfirst]] = list(match.files())
2025 2029 else:
2026 2030 op = fnopats[followdescendants][followfirst]
2027 2031 opts[op] = 'rev(%d)' % startrev
2028 2032 else:
2029 2033 opts['_patslog'] = list(pats)
2030 2034
2031 2035 filematcher = None
2032 2036 if opts.get('patch') or opts.get('stat'):
2033 2037 # When following files, track renames via a special matcher.
2034 2038 # If we're forced to take the slowpath it means we're following
2035 2039 # at least one pattern/directory, so don't bother with rename tracking.
2036 2040 if follow and not match.always() and not slowpath:
2037 2041 # _makefollowlogfilematcher expects its files argument to be
2038 2042 # relative to the repo root, so use match.files(), not pats.
2039 2043 filematcher = _makefollowlogfilematcher(repo, match.files(),
2040 2044 followfirst)
2041 2045 else:
2042 2046 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2043 2047 if filematcher is None:
2044 2048 filematcher = lambda rev: match
2045 2049
2046 2050 expr = []
2047 2051 for op, val in sorted(opts.iteritems()):
2048 2052 if not val:
2049 2053 continue
2050 2054 if op not in opt2revset:
2051 2055 continue
2052 2056 revop, andor = opt2revset[op]
2053 2057 if '%(val)' not in revop:
2054 2058 expr.append(revop)
2055 2059 else:
2056 2060 if not isinstance(val, list):
2057 2061 e = revop % {'val': val}
2058 2062 else:
2059 2063 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2060 2064 expr.append(e)
2061 2065
2062 2066 if expr:
2063 2067 expr = '(' + ' and '.join(expr) + ')'
2064 2068 else:
2065 2069 expr = None
2066 2070 return expr, filematcher
2067 2071
2068 2072 def _logrevs(repo, opts):
2069 2073 # Default --rev value depends on --follow but --follow behavior
2070 2074 # depends on revisions resolved from --rev...
2071 2075 follow = opts.get('follow') or opts.get('follow_first')
2072 2076 if opts.get('rev'):
2073 2077 revs = scmutil.revrange(repo, opts['rev'])
2074 2078 elif follow and repo.dirstate.p1() == nullid:
2075 2079 revs = revset.baseset()
2076 2080 elif follow:
2077 2081 revs = repo.revs('reverse(:.)')
2078 2082 else:
2079 2083 revs = revset.spanset(repo)
2080 2084 revs.reverse()
2081 2085 return revs
2082 2086
2083 2087 def getgraphlogrevs(repo, pats, opts):
2084 2088 """Return (revs, expr, filematcher) where revs is an iterable of
2085 2089 revision numbers, expr is a revset string built from log options
2086 2090 and file patterns or None, and used to filter 'revs'. If --stat or
2087 2091 --patch are not passed filematcher is None. Otherwise it is a
2088 2092 callable taking a revision number and returning a match object
2089 2093 filtering the files to be detailed when displaying the revision.
2090 2094 """
2091 2095 limit = loglimit(opts)
2092 2096 revs = _logrevs(repo, opts)
2093 2097 if not revs:
2094 2098 return revset.baseset(), None, None
2095 2099 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2096 2100 if opts.get('rev'):
2097 2101 # User-specified revs might be unsorted, but don't sort before
2098 2102 # _makelogrevset because it might depend on the order of revs
2099 2103 revs.sort(reverse=True)
2100 2104 if expr:
2101 2105 # Revset matchers often operate faster on revisions in changelog
2102 2106 # order, because most filters deal with the changelog.
2103 2107 revs.reverse()
2104 2108 matcher = revset.match(repo.ui, expr)
2105 2109 # Revset matches can reorder revisions. "A or B" typically returns
2106 2110 # the revision matching A then the revision matching B. Sort
2107 2111 # again to fix that.
2108 2112 revs = matcher(repo, revs)
2109 2113 revs.sort(reverse=True)
2110 2114 if limit is not None:
2111 2115 limitedrevs = []
2112 2116 for idx, rev in enumerate(revs):
2113 2117 if idx >= limit:
2114 2118 break
2115 2119 limitedrevs.append(rev)
2116 2120 revs = revset.baseset(limitedrevs)
2117 2121
2118 2122 return revs, expr, filematcher
2119 2123
2120 2124 def getlogrevs(repo, pats, opts):
2121 2125 """Return (revs, expr, filematcher) where revs is an iterable of
2122 2126 revision numbers, expr is a revset string built from log options
2123 2127 and file patterns or None, and used to filter 'revs'. If --stat or
2124 2128 --patch are not passed filematcher is None. Otherwise it is a
2125 2129 callable taking a revision number and returning a match object
2126 2130 filtering the files to be detailed when displaying the revision.
2127 2131 """
2128 2132 limit = loglimit(opts)
2129 2133 revs = _logrevs(repo, opts)
2130 2134 if not revs:
2131 2135 return revset.baseset([]), None, None
2132 2136 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2133 2137 if expr:
2134 2138 # Revset matchers often operate faster on revisions in changelog
2135 2139 # order, because most filters deal with the changelog.
2136 2140 if not opts.get('rev'):
2137 2141 revs.reverse()
2138 2142 matcher = revset.match(repo.ui, expr)
2139 2143 # Revset matches can reorder revisions. "A or B" typically returns
2140 2144 # the revision matching A then the revision matching B. Sort
2141 2145 # again to fix that.
2142 2146 revs = matcher(repo, revs)
2143 2147 if not opts.get('rev'):
2144 2148 revs.sort(reverse=True)
2145 2149 if limit is not None:
2146 2150 limitedrevs = []
2147 2151 for idx, r in enumerate(revs):
2148 2152 if limit <= idx:
2149 2153 break
2150 2154 limitedrevs.append(r)
2151 2155 revs = revset.baseset(limitedrevs)
2152 2156
2153 2157 return revs, expr, filematcher
2154 2158
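# Usage sketch (illustrative, not part of the original file): a minimal
# log-like consumer of getlogrevs(); real commands also handle copies,
# graph drawing and limits.
#
#     revs, expr, filematcher = getlogrevs(repo, pats, opts)
#     displayer = show_changeset(ui, repo, opts, buffered=False)
#     for rev in revs:
#         ctx = repo[rev]
#         matchfn = filematcher(rev) if filematcher else None
#         displayer.show(ctx, matchfn=matchfn)
#         displayer.flush(ctx)
#     displayer.close()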
2155 2159 def _graphnodeformatter(ui, displayer):
2156 2160 spec = ui.config('ui', 'graphnodetemplate')
2157 2161 if not spec:
2158 2162 return templatekw.showgraphnode # fast path for "{graphnode}"
2159 2163
2160 2164 templ = formatter.gettemplater(ui, 'graphnode', spec)
2161 2165 cache = {}
2162 2166 if isinstance(displayer, changeset_templater):
2163 2167 cache = displayer.cache # reuse cache of slow templates
2164 2168 props = templatekw.keywords.copy()
2165 2169 props['templ'] = templ
2166 2170 props['cache'] = cache
2167 2171 def formatnode(repo, ctx):
2168 2172 props['ctx'] = ctx
2169 2173 props['repo'] = repo
2170 2174 props['revcache'] = {}
2171 2175 return templater.stringify(templ('graphnode', **props))
2172 2176 return formatnode
2173 2177
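# Illustrative configuration sketch (not part of the original file, and
# assuming the ifcontains()/revset() template functions of contemporary
# Mercurial): draw the working directory parent with '@' while keeping the
# default symbol for everything else.
#
#     [ui]
#     graphnodetemplate = {ifcontains(rev, revset('.'), '@', graphnode)}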
2174 2178 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2175 2179 filematcher=None):
2176 2180 formatnode = _graphnodeformatter(ui, displayer)
2177 2181 seen, state = [], graphmod.asciistate()
2178 2182 for rev, type, ctx, parents in dag:
2179 2183 char = formatnode(repo, ctx)
2180 2184 copies = None
2181 2185 if getrenamed and ctx.rev():
2182 2186 copies = []
2183 2187 for fn in ctx.files():
2184 2188 rename = getrenamed(fn, ctx.rev())
2185 2189 if rename:
2186 2190 copies.append((fn, rename[0]))
2187 2191 revmatchfn = None
2188 2192 if filematcher is not None:
2189 2193 revmatchfn = filematcher(ctx.rev())
2190 2194 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2191 2195 lines = displayer.hunk.pop(rev).split('\n')
2192 2196 if not lines[-1]:
2193 2197 del lines[-1]
2194 2198 displayer.flush(ctx)
2195 2199 edges = edgefn(type, char, lines, seen, rev, parents)
2196 2200 for type, char, lines, coldata in edges:
2197 2201 graphmod.ascii(ui, state, type, char, lines, coldata)
2198 2202 displayer.close()
2199 2203
2200 2204 def graphlog(ui, repo, *pats, **opts):
2201 2205 # Parameters are identical to log command ones
2202 2206 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2203 2207 revdag = graphmod.dagwalker(repo, revs)
2204 2208
2205 2209 getrenamed = None
2206 2210 if opts.get('copies'):
2207 2211 endrev = None
2208 2212 if opts.get('rev'):
2209 2213 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2210 2214 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2211 2215 displayer = show_changeset(ui, repo, opts, buffered=True)
2212 2216 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2213 2217 filematcher)
2214 2218
2215 2219 def checkunsupportedgraphflags(pats, opts):
2216 2220 for op in ["newest_first"]:
2217 2221 if op in opts and opts[op]:
2218 2222 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2219 2223 % op.replace("_", "-"))
2220 2224
2221 2225 def graphrevs(repo, nodes, opts):
2222 2226 limit = loglimit(opts)
2223 2227 nodes.reverse()
2224 2228 if limit is not None:
2225 2229 nodes = nodes[:limit]
2226 2230 return graphmod.nodes(repo, nodes)
2227 2231
2228 2232 def add(ui, repo, match, prefix, explicitonly, **opts):
2229 2233 join = lambda f: os.path.join(prefix, f)
2230 2234 bad = []
2231 2235
2232 2236 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2233 2237 names = []
2234 2238 wctx = repo[None]
2235 2239 cca = None
2236 2240 abort, warn = scmutil.checkportabilityalert(ui)
2237 2241 if abort or warn:
2238 2242 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2239 2243
2240 2244 badmatch = matchmod.badmatch(match, badfn)
2241 2245 dirstate = repo.dirstate
2242 2246 # We don't want to just call wctx.walk here, since it would return a lot of
2243 2247 # clean files, which we aren't interested in, and that takes time.
2244 2248 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2245 2249 True, False, full=False)):
2246 2250 exact = match.exact(f)
2247 2251 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2248 2252 if cca:
2249 2253 cca(f)
2250 2254 names.append(f)
2251 2255 if ui.verbose or not exact:
2252 2256 ui.status(_('adding %s\n') % match.rel(f))
2253 2257
2254 2258 for subpath in sorted(wctx.substate):
2255 2259 sub = wctx.sub(subpath)
2256 2260 try:
2257 2261 submatch = matchmod.subdirmatcher(subpath, match)
2258 2262 if opts.get('subrepos'):
2259 2263 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2260 2264 else:
2261 2265 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2262 2266 except error.LookupError:
2263 2267 ui.status(_("skipping missing subrepository: %s\n")
2264 2268 % join(subpath))
2265 2269
2266 2270 if not opts.get('dry_run'):
2267 2271 rejected = wctx.add(names, prefix)
2268 2272 bad.extend(f for f in rejected if f in match.files())
2269 2273 return bad
2270 2274
2271 2275 def forget(ui, repo, match, prefix, explicitonly):
2272 2276 join = lambda f: os.path.join(prefix, f)
2273 2277 bad = []
2274 2278 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2275 2279 wctx = repo[None]
2276 2280 forgot = []
2277 2281
2278 2282 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2279 2283 forget = sorted(s[0] + s[1] + s[3] + s[6])
2280 2284 if explicitonly:
2281 2285 forget = [f for f in forget if match.exact(f)]
2282 2286
2283 2287 for subpath in sorted(wctx.substate):
2284 2288 sub = wctx.sub(subpath)
2285 2289 try:
2286 2290 submatch = matchmod.subdirmatcher(subpath, match)
2287 2291 subbad, subforgot = sub.forget(submatch, prefix)
2288 2292 bad.extend([subpath + '/' + f for f in subbad])
2289 2293 forgot.extend([subpath + '/' + f for f in subforgot])
2290 2294 except error.LookupError:
2291 2295 ui.status(_("skipping missing subrepository: %s\n")
2292 2296 % join(subpath))
2293 2297
2294 2298 if not explicitonly:
2295 2299 for f in match.files():
2296 2300 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2297 2301 if f not in forgot:
2298 2302 if repo.wvfs.exists(f):
2299 2303 # Don't complain if the exact case match wasn't given.
2300 2304 # But don't do this until after checking 'forgot', so
2301 2305 # that subrepo files aren't normalized, and this op is
2302 2306 # purely from data cached by the status walk above.
2303 2307 if repo.dirstate.normalize(f) in repo.dirstate:
2304 2308 continue
2305 2309 ui.warn(_('not removing %s: '
2306 2310 'file is already untracked\n')
2307 2311 % match.rel(f))
2308 2312 bad.append(f)
2309 2313
2310 2314 for f in forget:
2311 2315 if ui.verbose or not match.exact(f):
2312 2316 ui.status(_('removing %s\n') % match.rel(f))
2313 2317
2314 2318 rejected = wctx.forget(forget, prefix)
2315 2319 bad.extend(f for f in rejected if f in match.files())
2316 2320 forgot.extend(f for f in forget if f not in rejected)
2317 2321 return bad, forgot
2318 2322
2319 2323 def files(ui, ctx, m, fm, fmt, subrepos):
2320 2324 rev = ctx.rev()
2321 2325 ret = 1
2322 2326 ds = ctx.repo().dirstate
2323 2327
2324 2328 for f in ctx.matches(m):
2325 2329 if rev is None and ds[f] == 'r':
2326 2330 continue
2327 2331 fm.startitem()
2328 2332 if ui.verbose:
2329 2333 fc = ctx[f]
2330 2334 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2331 2335 fm.data(abspath=f)
2332 2336 fm.write('path', fmt, m.rel(f))
2333 2337 ret = 0
2334 2338
2335 2339 for subpath in sorted(ctx.substate):
2336 2340 def matchessubrepo(subpath):
2337 2341 return (m.always() or m.exact(subpath)
2338 2342 or any(f.startswith(subpath + '/') for f in m.files()))
2339 2343
2340 2344 if subrepos or matchessubrepo(subpath):
2341 2345 sub = ctx.sub(subpath)
2342 2346 try:
2343 2347 submatch = matchmod.subdirmatcher(subpath, m)
2344 2348 if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
2345 2349 ret = 0
2346 2350 except error.LookupError:
2347 2351 ui.status(_("skipping missing subrepository: %s\n")
2348 2352 % m.abs(subpath))
2349 2353
2350 2354 return ret
2351 2355
2352 2356 def remove(ui, repo, m, prefix, after, force, subrepos):
2353 2357 join = lambda f: os.path.join(prefix, f)
2354 2358 ret = 0
2355 2359 s = repo.status(match=m, clean=True)
2356 2360 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2357 2361
2358 2362 wctx = repo[None]
2359 2363
2360 2364 for subpath in sorted(wctx.substate):
2361 2365 def matchessubrepo(matcher, subpath):
2362 2366 if matcher.exact(subpath):
2363 2367 return True
2364 2368 for f in matcher.files():
2365 2369 if f.startswith(subpath):
2366 2370 return True
2367 2371 return False
2368 2372
2369 2373 if subrepos or matchessubrepo(m, subpath):
2370 2374 sub = wctx.sub(subpath)
2371 2375 try:
2372 2376 submatch = matchmod.subdirmatcher(subpath, m)
2373 2377 if sub.removefiles(submatch, prefix, after, force, subrepos):
2374 2378 ret = 1
2375 2379 except error.LookupError:
2376 2380 ui.status(_("skipping missing subrepository: %s\n")
2377 2381 % join(subpath))
2378 2382
2379 2383 # warn about failure to delete explicit files/dirs
2380 2384 deleteddirs = util.dirs(deleted)
2381 2385 for f in m.files():
2382 2386 def insubrepo():
2383 2387 for subpath in wctx.substate:
2384 2388 if f.startswith(subpath):
2385 2389 return True
2386 2390 return False
2387 2391
2388 2392 isdir = f in deleteddirs or wctx.hasdir(f)
2389 2393 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2390 2394 continue
2391 2395
2392 2396 if repo.wvfs.exists(f):
2393 2397 if repo.wvfs.isdir(f):
2394 2398 ui.warn(_('not removing %s: no tracked files\n')
2395 2399 % m.rel(f))
2396 2400 else:
2397 2401 ui.warn(_('not removing %s: file is untracked\n')
2398 2402 % m.rel(f))
2399 2403 # missing files will generate a warning elsewhere
2400 2404 ret = 1
2401 2405
2402 2406 if force:
2403 2407 list = modified + deleted + clean + added
2404 2408 elif after:
2405 2409 list = deleted
2406 2410 for f in modified + added + clean:
2407 2411 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2408 2412 ret = 1
2409 2413 else:
2410 2414 list = deleted + clean
2411 2415 for f in modified:
2412 2416 ui.warn(_('not removing %s: file is modified (use -f'
2413 2417 ' to force removal)\n') % m.rel(f))
2414 2418 ret = 1
2415 2419 for f in added:
2416 2420 ui.warn(_('not removing %s: file has been marked for add'
2417 2421 ' (use forget to undo)\n') % m.rel(f))
2418 2422 ret = 1
2419 2423
2420 2424 for f in sorted(list):
2421 2425 if ui.verbose or not m.exact(f):
2422 2426 ui.status(_('removing %s\n') % m.rel(f))
2423 2427
2424 2428 with repo.wlock():
2425 2429 if not after:
2426 2430 for f in list:
2427 2431 if f in added:
2428 2432 continue # we never unlink added files on remove
2429 2433 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2430 2434 repo[None].forget(list)
2431 2435
2432 2436 return ret
2433 2437
2434 2438 def cat(ui, repo, ctx, matcher, prefix, **opts):
2435 2439 err = 1
2436 2440
2437 2441 def write(path):
2438 2442 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2439 2443 pathname=os.path.join(prefix, path))
2440 2444 data = ctx[path].data()
2441 2445 if opts.get('decode'):
2442 2446 data = repo.wwritedata(path, data)
2443 2447 fp.write(data)
2444 2448 fp.close()
2445 2449
2446 2450 # Automation often uses hg cat on single files, so special case it
2447 2451 # for performance to avoid the cost of parsing the manifest.
2448 2452 if len(matcher.files()) == 1 and not matcher.anypats():
2449 2453 file = matcher.files()[0]
2450 2454 mf = repo.manifest
2451 2455 mfnode = ctx.manifestnode()
2452 2456 if mfnode and mf.find(mfnode, file)[0]:
2453 2457 write(file)
2454 2458 return 0
2455 2459
2456 2460 # Don't warn about "missing" files that are really in subrepos
2457 2461 def badfn(path, msg):
2458 2462 for subpath in ctx.substate:
2459 2463 if path.startswith(subpath):
2460 2464 return
2461 2465 matcher.bad(path, msg)
2462 2466
2463 2467 for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
2464 2468 write(abs)
2465 2469 err = 0
2466 2470
2467 2471 for subpath in sorted(ctx.substate):
2468 2472 sub = ctx.sub(subpath)
2469 2473 try:
2470 2474 submatch = matchmod.subdirmatcher(subpath, matcher)
2471 2475
2472 2476 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2473 2477 **opts):
2474 2478 err = 0
2475 2479 except error.RepoLookupError:
2476 2480 ui.status(_("skipping missing subrepository: %s\n")
2477 2481 % os.path.join(prefix, subpath))
2478 2482
2479 2483 return err
2480 2484
2481 2485 def commit(ui, repo, commitfunc, pats, opts):
2482 2486 '''commit the specified files or all outstanding changes'''
2483 2487 date = opts.get('date')
2484 2488 if date:
2485 2489 opts['date'] = util.parsedate(date)
2486 2490 message = logmessage(ui, opts)
2487 2491 matcher = scmutil.match(repo[None], pats, opts)
2488 2492
2489 2493 # extract addremove carefully -- this function can be called from a command
2490 2494 # that doesn't support addremove
2491 2495 if opts.get('addremove'):
2492 2496 if scmutil.addremove(repo, matcher, "", opts) != 0:
2493 2497 raise error.Abort(
2494 2498 _("failed to mark all new/missing files as added/removed"))
2495 2499
2496 2500 return commitfunc(ui, repo, message, matcher, opts)
2497 2501
2498 2502 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2499 2503 # avoid cycle context -> subrepo -> cmdutil
2500 2504 import context
2501 2505
2502 2506 # amend will reuse the existing user if not specified, but the obsolete
2503 2507 # marker creation requires that the current user's name is specified.
2504 2508 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2505 2509 ui.username() # raise exception if username not set
2506 2510
2507 2511 ui.note(_('amending changeset %s\n') % old)
2508 2512 base = old.p1()
2509 2513 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2510 2514
2511 2515 wlock = lock = newid = None
2512 2516 try:
2513 2517 wlock = repo.wlock()
2514 2518 lock = repo.lock()
2515 2519 with repo.transaction('amend') as tr:
2516 2520 # See if we got a message from -m or -l, if not, open the editor
2517 2521 # with the message of the changeset to amend
2518 2522 message = logmessage(ui, opts)
2519 2523 # ensure logfile does not conflict with later enforcement of the
2520 2524 # message. potential logfile content has been processed by
2521 2525 # `logmessage` anyway.
2522 2526 opts.pop('logfile')
2523 2527 # First, do a regular commit to record all changes in the working
2524 2528 # directory (if there are any)
2525 2529 ui.callhooks = False
2526 2530 activebookmark = repo._bookmarks.active
2527 2531 try:
2528 2532 repo._bookmarks.active = None
2529 2533 opts['message'] = 'temporary amend commit for %s' % old
2530 2534 node = commit(ui, repo, commitfunc, pats, opts)
2531 2535 finally:
2532 2536 repo._bookmarks.active = activebookmark
2533 2537 repo._bookmarks.recordchange(tr)
2534 2538 ui.callhooks = True
2535 2539 ctx = repo[node]
2536 2540
2537 2541 # Participating changesets:
2538 2542 #
2539 2543 # node/ctx o - new (intermediate) commit that contains changes
2540 2544 # | from working dir to go into amending commit
2541 2545 # | (or a workingctx if there were no changes)
2542 2546 # |
2543 2547 # old o - changeset to amend
2544 2548 # |
2545 2549 # base o - parent of amending changeset
2546 2550
2547 2551 # Update extra dict from amended commit (e.g. to preserve graft
2548 2552 # source)
2549 2553 extra.update(old.extra())
2550 2554
2551 2555 # Also update it from the intermediate commit or from the wctx
2552 2556 extra.update(ctx.extra())
2553 2557
2554 2558 if len(old.parents()) > 1:
2555 2559 # ctx.files() isn't reliable for merges, so fall back to the
2556 2560 # slower repo.status() method
2557 2561 files = set([fn for st in repo.status(base, old)[:3]
2558 2562 for fn in st])
2559 2563 else:
2560 2564 files = set(old.files())
2561 2565
2562 2566 # Second, we use either the commit we just did, or if there were no
2563 2567 # changes the parent of the working directory as the version of the
2564 2568 # files in the final amend commit
2565 2569 if node:
2566 2570 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2567 2571
2568 2572 user = ctx.user()
2569 2573 date = ctx.date()
2570 2574 # Recompute copies (avoid recording a -> b -> a)
2571 2575 copied = copies.pathcopies(base, ctx)
2572 2576 if old.p2(): # only recompute p2 copies for merge commits
2573 2577 copied.update(copies.pathcopies(old.p2(), ctx))
2574 2578
2575 2579 # Prune files which were reverted by the updates: if old
2576 2580 # introduced file X and our intermediate commit, node,
2577 2581 # renamed that file, then those two files are the same and
2578 2582 # we can discard X from our list of files. Likewise if X
2579 2583 # was deleted, it's no longer relevant
2580 2584 files.update(ctx.files())
2581 2585
2582 2586 def samefile(f):
2583 2587 if f in ctx.manifest():
2584 2588 a = ctx.filectx(f)
2585 2589 if f in base.manifest():
2586 2590 b = base.filectx(f)
2587 2591 return (not a.cmp(b)
2588 2592 and a.flags() == b.flags())
2589 2593 else:
2590 2594 return False
2591 2595 else:
2592 2596 return f not in base.manifest()
2593 2597 files = [f for f in files if not samefile(f)]
2594 2598
2595 2599 def filectxfn(repo, ctx_, path):
2596 2600 try:
2597 2601 fctx = ctx[path]
2598 2602 flags = fctx.flags()
2599 2603 mctx = context.memfilectx(repo,
2600 2604 fctx.path(), fctx.data(),
2601 2605 islink='l' in flags,
2602 2606 isexec='x' in flags,
2603 2607 copied=copied.get(path))
2604 2608 return mctx
2605 2609 except KeyError:
2606 2610 return None
2607 2611 else:
2608 2612 ui.note(_('copying changeset %s to %s\n') % (old, base))
2609 2613
2610 2614 # Use version of files as in the old cset
2611 2615 def filectxfn(repo, ctx_, path):
2612 2616 try:
2613 2617 return old.filectx(path)
2614 2618 except KeyError:
2615 2619 return None
2616 2620
2617 2621 user = opts.get('user') or old.user()
2618 2622 date = opts.get('date') or old.date()
2619 2623 editform = mergeeditform(old, 'commit.amend')
2620 2624 editor = getcommiteditor(editform=editform, **opts)
2621 2625 if not message:
2622 2626 editor = getcommiteditor(edit=True, editform=editform)
2623 2627 message = old.description()
2624 2628
2625 2629 pureextra = extra.copy()
2626 2630 extra['amend_source'] = old.hex()
2627 2631
2628 2632 new = context.memctx(repo,
2629 2633 parents=[base.node(), old.p2().node()],
2630 2634 text=message,
2631 2635 files=files,
2632 2636 filectxfn=filectxfn,
2633 2637 user=user,
2634 2638 date=date,
2635 2639 extra=extra,
2636 2640 editor=editor)
2637 2641
2638 2642 newdesc = changelog.stripdesc(new.description())
2639 2643 if ((not node)
2640 2644 and newdesc == old.description()
2641 2645 and user == old.user()
2642 2646 and date == old.date()
2643 2647 and pureextra == old.extra()):
2644 2648 # nothing changed. continuing here would create a new node
2645 2649 # anyway because of the amend_source noise.
2646 2650 #
2647 2651 # This is not what we expect from amend.
2648 2652 return old.node()
2649 2653
2650 2654 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2651 2655 try:
2652 2656 if opts.get('secret'):
2653 2657 commitphase = 'secret'
2654 2658 else:
2655 2659 commitphase = old.phase()
2656 2660 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2657 2661 newid = repo.commitctx(new)
2658 2662 finally:
2659 2663 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2660 2664 if newid != old.node():
2661 2665 # Reroute the working copy parent to the new changeset
2662 2666 repo.setparents(newid, nullid)
2663 2667
2664 2668 # Move bookmarks from old parent to amend commit
2665 2669 bms = repo.nodebookmarks(old.node())
2666 2670 if bms:
2667 2671 marks = repo._bookmarks
2668 2672 for bm in bms:
2669 2673 ui.debug('moving bookmarks %r from %s to %s\n' %
2670 2674 (marks, old.hex(), hex(newid)))
2671 2675 marks[bm] = newid
2672 2676 marks.recordchange(tr)
2673 2677 # commit the whole amend process
2674 2678 if createmarkers:
2675 2679 # mark the new changeset as successor of the rewritten one
2676 2680 new = repo[newid]
2677 2681 obs = [(old, (new,))]
2678 2682 if node:
2679 2683 obs.append((ctx, ()))
2680 2684
2681 2685 obsolete.createmarkers(repo, obs)
2682 2686 if not createmarkers and newid != old.node():
2683 2687 # Strip the intermediate commit (if there was one) and the amended
2684 2688 # commit
2685 2689 if node:
2686 2690 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2687 2691 ui.note(_('stripping amended changeset %s\n') % old)
2688 2692 repair.strip(ui, repo, old.node(), topic='amend-backup')
2689 2693 finally:
2690 2694 lockmod.release(lock, wlock)
2691 2695 return newid
2692 2696
2693 2697 def commiteditor(repo, ctx, subs, editform=''):
2694 2698 if ctx.description():
2695 2699 return ctx.description()
2696 2700 return commitforceeditor(repo, ctx, subs, editform=editform,
2697 2701 unchangedmessagedetection=True)
2698 2702
2699 2703 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2700 2704 editform='', unchangedmessagedetection=False):
2701 2705 if not extramsg:
2702 2706 extramsg = _("Leave message empty to abort commit.")
2703 2707
2704 2708 forms = [e for e in editform.split('.') if e]
2705 2709 forms.insert(0, 'changeset')
2706 2710 templatetext = None
2707 2711 while forms:
2708 2712 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2709 2713 if tmpl:
2710 2714 templatetext = committext = buildcommittemplate(
2711 2715 repo, ctx, subs, extramsg, tmpl)
2712 2716 break
2713 2717 forms.pop()
2714 2718 else:
2715 2719 committext = buildcommittext(repo, ctx, subs, extramsg)
2716 2720
2717 2721 # run editor in the repository root
2718 2722 olddir = os.getcwd()
2719 2723 os.chdir(repo.root)
2720 2724
2721 2725 # make in-memory changes visible to external process
2722 2726 tr = repo.currenttransaction()
2723 2727 repo.dirstate.write(tr)
2724 2728 pending = tr and tr.writepending() and repo.root
2725 2729
2726 2730 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2727 2731 editform=editform, pending=pending)
2728 2732 text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
2729 2733 os.chdir(olddir)
2730 2734
2731 2735 if finishdesc:
2732 2736 text = finishdesc(text)
2733 2737 if not text.strip():
2734 2738 raise error.Abort(_("empty commit message"))
2735 2739 if unchangedmessagedetection and editortext == templatetext:
2736 2740 raise error.Abort(_("commit message unchanged"))
2737 2741
2738 2742 return text
2739 2743
2740 2744 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2741 2745 ui = repo.ui
2742 2746 tmpl, mapfile = gettemplate(ui, tmpl, None)
2743 2747
2744 2748 try:
2745 2749 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2746 2750 except SyntaxError as inst:
2747 2751 raise error.Abort(inst.args[0])
2748 2752
2749 2753 for k, v in repo.ui.configitems('committemplate'):
2750 2754 if k != 'changeset':
2751 2755 t.t.cache[k] = v
2752 2756
2753 2757 if not extramsg:
2754 2758 extramsg = '' # ensure that extramsg is string
2755 2759
2756 2760 ui.pushbuffer()
2757 2761 t.show(ctx, extramsg=extramsg)
2758 2762 return ui.popbuffer()
2759 2763
2760 2764 def hgprefix(msg):
2761 2765 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2762 2766
2763 2767 def buildcommittext(repo, ctx, subs, extramsg):
2764 2768 edittext = []
2765 2769 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2766 2770 if ctx.description():
2767 2771 edittext.append(ctx.description())
2768 2772 edittext.append("")
2769 2773 edittext.append("") # Empty line between message and comments.
2770 2774 edittext.append(hgprefix(_("Enter commit message."
2771 2775 " Lines beginning with 'HG:' are removed.")))
2772 2776 edittext.append(hgprefix(extramsg))
2773 2777 edittext.append("HG: --")
2774 2778 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2775 2779 if ctx.p2():
2776 2780 edittext.append(hgprefix(_("branch merge")))
2777 2781 if ctx.branch():
2778 2782 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2779 2783 if bookmarks.isactivewdirparent(repo):
2780 2784 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2781 2785 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2782 2786 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2783 2787 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2784 2788 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2785 2789 if not added and not modified and not removed:
2786 2790 edittext.append(hgprefix(_("no files changed")))
2787 2791 edittext.append("")
2788 2792
2789 2793 return "\n".join(edittext)
2790 2794
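# Illustrative sketch (not part of the original file): for a commit with no
# previous description, a hypothetical user "alice" on branch "default" and
# one modified file "foo.py", buildcommittext() produces roughly the
# following (two leading blank lines elided):
#
#     HG: Enter commit message. Lines beginning with 'HG:' are removed.
#     HG: Leave message empty to abort commit.
#     HG: --
#     HG: user: alice
#     HG: branch 'default'
#     HG: changed foo.py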
2791 2795 def commitstatus(repo, node, branch, bheads=None, opts=None):
2792 2796 if opts is None:
2793 2797 opts = {}
2794 2798 ctx = repo[node]
2795 2799 parents = ctx.parents()
2796 2800
2797 2801 if (not opts.get('amend') and bheads and node not in bheads and not
2798 2802 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2799 2803 repo.ui.status(_('created new head\n'))
2800 2804 # The message is not printed for initial roots. For the other
2801 2805 # changesets, it is printed in the following situations:
2802 2806 #
2803 2807 # Par column: for the 2 parents with ...
2804 2808 # N: null or no parent
2805 2809 # B: parent is on another named branch
2806 2810 # C: parent is a regular non head changeset
2807 2811 # H: parent was a branch head of the current branch
2808 2812 # Msg column: whether we print "created new head" message
2809 2813 # In the following, it is assumed that there already exist some
2810 2814 # initial branch heads of the current branch, otherwise nothing is
2811 2815 # printed anyway.
2812 2816 #
2813 2817 # Par Msg Comment
2814 2818 # N N y additional topo root
2815 2819 #
2816 2820 # B N y additional branch root
2817 2821 # C N y additional topo head
2818 2822 # H N n usual case
2819 2823 #
2820 2824 # B B y weird additional branch root
2821 2825 # C B y branch merge
2822 2826 # H B n merge with named branch
2823 2827 #
2824 2828 # C C y additional head from merge
2825 2829 # C H n merge with a head
2826 2830 #
2827 2831 # H H n head merge: head count decreases
2828 2832
2829 2833 if not opts.get('close_branch'):
2830 2834 for r in parents:
2831 2835 if r.closesbranch() and r.branch() == branch:
2832 2836 repo.ui.status(_('reopening closed branch head %d\n') % r)
2833 2837
2834 2838 if repo.ui.debugflag:
2835 2839 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2836 2840 elif repo.ui.verbose:
2837 2841 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2838 2842
2839 2843 def postcommitstatus(repo, pats, opts):
2840 2844 return repo.status(match=scmutil.match(repo[None], pats, opts))
2841 2845
2842 2846 def revert(ui, repo, ctx, parents, *pats, **opts):
2843 2847 parent, p2 = parents
2844 2848 node = ctx.node()
2845 2849
2846 2850 mf = ctx.manifest()
2847 2851 if node == p2:
2848 2852 parent = p2
2849 2853 if node == parent:
2850 2854 pmf = mf
2851 2855 else:
2852 2856 pmf = None
2853 2857
2854 2858 # need all matching names in dirstate and manifest of target rev,
2855 2859 # so have to walk both. do not print errors if files exist in one
2856 2860 # but not the other. in both cases, filesets should be evaluated against
2857 2861 # workingctx to get consistent result (issue4497). this means 'set:**'
2858 2862 # cannot be used to select missing files from target rev.
2859 2863
2860 2864 # `names` is a mapping for all elements in working copy and target revision
2861 2865 # The mapping is in the form:
2862 2866 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2863 2867 names = {}
2864 2868
2865 2869 with repo.wlock():
2866 2870 ## filling of the `names` mapping
2867 2871 # walk dirstate to fill `names`
2868 2872
2869 2873 interactive = opts.get('interactive', False)
2870 2874 wctx = repo[None]
2871 2875 m = scmutil.match(wctx, pats, opts)
2872 2876
2873 2877 # we'll need this later
2874 2878 targetsubs = sorted(s for s in wctx.substate if m(s))
2875 2879
2876 2880 if not m.always():
2877 2881 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2878 2882 names[abs] = m.rel(abs), m.exact(abs)
2879 2883
2880 2884 # walk target manifest to fill `names`
2881 2885
2882 2886 def badfn(path, msg):
2883 2887 if path in names:
2884 2888 return
2885 2889 if path in ctx.substate:
2886 2890 return
2887 2891 path_ = path + '/'
2888 2892 for f in names:
2889 2893 if f.startswith(path_):
2890 2894 return
2891 2895 ui.warn("%s: %s\n" % (m.rel(path), msg))
2892 2896
2893 2897 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2894 2898 if abs not in names:
2895 2899 names[abs] = m.rel(abs), m.exact(abs)
2896 2900
2897 2901 # Find status of all file in `names`.
2898 2902 m = scmutil.matchfiles(repo, names)
2899 2903
2900 2904 changes = repo.status(node1=node, match=m,
2901 2905 unknown=True, ignored=True, clean=True)
2902 2906 else:
2903 2907 changes = repo.status(node1=node, match=m)
2904 2908 for kind in changes:
2905 2909 for abs in kind:
2906 2910 names[abs] = m.rel(abs), m.exact(abs)
2907 2911
2908 2912 m = scmutil.matchfiles(repo, names)
2909 2913
2910 2914 modified = set(changes.modified)
2911 2915 added = set(changes.added)
2912 2916 removed = set(changes.removed)
2913 2917 _deleted = set(changes.deleted)
2914 2918 unknown = set(changes.unknown)
2915 2919 unknown.update(changes.ignored)
2916 2920 clean = set(changes.clean)
2917 2921 modadded = set()
2918 2922
2919 2923 # split between files known in target manifest and the others
2920 2924 smf = set(mf)
2921 2925
2922 2926 # determine the exact nature of the deleted files
2923 2927 deladded = _deleted - smf
2924 2928 deleted = _deleted - deladded
2925 2929
2926 2930 # We need to account for the state of the file in the dirstate,
2927 2931 # even when we revert against something other than the parent. This will
2928 2932 # slightly alter the behavior of revert (doing a backup or not, delete
2929 2933 # or just forget etc).
2930 2934 if parent == node:
2931 2935 dsmodified = modified
2932 2936 dsadded = added
2933 2937 dsremoved = removed
2934 2938 # store all local modifications, useful later for rename detection
2935 2939 localchanges = dsmodified | dsadded
2936 2940 modified, added, removed = set(), set(), set()
2937 2941 else:
2938 2942 changes = repo.status(node1=parent, match=m)
2939 2943 dsmodified = set(changes.modified)
2940 2944 dsadded = set(changes.added)
2941 2945 dsremoved = set(changes.removed)
2942 2946 # store all local modifications, useful later for rename detection
2943 2947 localchanges = dsmodified | dsadded
2944 2948
2945 2949 # only take removes between wc and target into account
2946 2950 clean |= dsremoved - removed
2947 2951 dsremoved &= removed
2948 2952 # distinguish between dirstate removes and the others
2949 2953 removed -= dsremoved
2950 2954
2951 2955 modadded = added & dsmodified
2952 2956 added -= modadded
2953 2957
2954 2958 # tell newly modified files apart.
2955 2959 dsmodified &= modified
2956 2960 dsmodified |= modified & dsadded # dirstate-added files may need backup
2957 2961 modified -= dsmodified
2958 2962
2959 2963 # We need to wait for some post-processing to update this set
2960 2964 # before making the distinction. The dirstate will be used for
2961 2965 # that purpose.
2962 2966 dsadded = added
2963 2967
2964 2968 # in case of merge, files that are actually added can be reported as
2965 2969 # modified, we need to post process the result
2966 2970 if p2 != nullid:
2967 2971 if pmf is None:
2968 2972 # only need parent manifest in the merge case,
2969 2973 # so do not read by default
2970 2974 pmf = repo[parent].manifest()
2971 2975 mergeadd = dsmodified - set(pmf)
2972 2976 dsadded |= mergeadd
2973 2977 dsmodified -= mergeadd
2974 2978
2975 2979 # if f is a rename, update `names` to also revert the source
2976 2980 cwd = repo.getcwd()
2977 2981 for f in localchanges:
2978 2982 src = repo.dirstate.copied(f)
2979 2983 # XXX should we check for rename down to target node?
2980 2984 if src and src not in names and repo.dirstate[src] == 'r':
2981 2985 dsremoved.add(src)
2982 2986 names[src] = (repo.pathto(src, cwd), True)
2983 2987
2984 2988 # distinguish between file to forget and the other
2985 2989 added = set()
2986 2990 for abs in dsadded:
2987 2991 if repo.dirstate[abs] != 'a':
2988 2992 added.add(abs)
2989 2993 dsadded -= added
2990 2994
2991 2995 for abs in deladded:
2992 2996 if repo.dirstate[abs] == 'a':
2993 2997 dsadded.add(abs)
2994 2998 deladded -= dsadded
2995 2999
2996 3000 # For files marked as removed, we check if an unknown file is present at
2997 3001 # the same path. If such a file exists it may need to be backed up.
2998 3002 # Making the distinction at this stage keeps the backup logic
2999 3003 # simpler.
3000 3004 removunk = set()
3001 3005 for abs in removed:
3002 3006 target = repo.wjoin(abs)
3003 3007 if os.path.lexists(target):
3004 3008 removunk.add(abs)
3005 3009 removed -= removunk
3006 3010
3007 3011 dsremovunk = set()
3008 3012 for abs in dsremoved:
3009 3013 target = repo.wjoin(abs)
3010 3014 if os.path.lexists(target):
3011 3015 dsremovunk.add(abs)
3012 3016 dsremoved -= dsremovunk
3013 3017
3014 3018 # action to be actually performed by revert
3015 3019 # (<list of file>, message>) tuple
3016 3020 actions = {'revert': ([], _('reverting %s\n')),
3017 3021 'add': ([], _('adding %s\n')),
3018 3022 'remove': ([], _('removing %s\n')),
3019 3023 'drop': ([], _('removing %s\n')),
3020 3024 'forget': ([], _('forgetting %s\n')),
3021 3025 'undelete': ([], _('undeleting %s\n')),
3022 3026 'noop': (None, _('no changes needed to %s\n')),
3023 3027 'unknown': (None, _('file not managed: %s\n')),
3024 3028 }
3025 3029
3026 3030 # "constant" that convey the backup strategy.
3027 3031 # All set to `discard` if `no-backup` is set, to avoid checking
3028 3032 # no_backup lower in the code.
3029 3033 # These values are ordered for comparison purposes
3030 3034 backup = 2 # unconditionally do backup
3031 3035 check = 1 # check if the existing file differs from target
3032 3036 discard = 0 # never do backup
3033 3037 if opts.get('no_backup'):
3034 3038 backup = check = discard
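# (illustrative note, not in the original file: with --no-backup all three
# constants above collapse to 0, so every `dobackup` value taken from the
# dispatch table below is falsy and no backup copy is ever written)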
3035 3039
3036 3040 backupanddel = actions['remove']
3037 3041 if not opts.get('no_backup'):
3038 3042 backupanddel = actions['drop']
3039 3043
3040 3044 disptable = (
3041 3045 # dispatch table:
3042 3046 # file state
3043 3047 # action
3044 3048 # make backup
3045 3049
3046 3050 ## Sets whose results will change files on disk
3047 3051 # Modified compared to target, no local change
3048 3052 (modified, actions['revert'], discard),
3049 3053 # Modified compared to target, but local file is deleted
3050 3054 (deleted, actions['revert'], discard),
3051 3055 # Modified compared to target, local change
3052 3056 (dsmodified, actions['revert'], backup),
3053 3057 # Added since target
3054 3058 (added, actions['remove'], discard),
3055 3059 # Added in working directory
3056 3060 (dsadded, actions['forget'], discard),
3057 3061 # Added since target, have local modification
3058 3062 (modadded, backupanddel, backup),
3059 3063 # Added since target but file is missing in working directory
3060 3064 (deladded, actions['drop'], discard),
3061 3065 # Removed since target, before working copy parent
3062 3066 (removed, actions['add'], discard),
3063 3067 # Same as `removed` but an unknown file exists at the same path
3064 3068 (removunk, actions['add'], check),
3065 3069 # Removed since target, marked as such in working copy parent
3066 3070 (dsremoved, actions['undelete'], discard),
3067 3071 # Same as `dsremoved` but an unknown file exists at the same path
3068 3072 (dsremovunk, actions['undelete'], check),
3069 3073 ## the following sets do not result in any file changes
3070 3074 # File with no modification
3071 3075 (clean, actions['noop'], discard),
3072 3076 # Existing file, not tracked anywhere
3073 3077 (unknown, actions['unknown'], discard),
3074 3078 )
3075 3079
3076 3080 for abs, (rel, exact) in sorted(names.items()):
3077 3081 # target file to be touched on disk (relative to cwd)
3078 3082 target = repo.wjoin(abs)
3079 3083 # search the entry in the dispatch table.
3080 3084 # if the file is in any of these sets, it was touched in the working
3081 3085 # directory parent and we are sure it needs to be reverted.
3082 3086 for table, (xlist, msg), dobackup in disptable:
3083 3087 if abs not in table:
3084 3088 continue
3085 3089 if xlist is not None:
3086 3090 xlist.append(abs)
3087 3091 if dobackup and (backup <= dobackup
3088 3092 or wctx[abs].cmp(ctx[abs])):
3089 3093 bakname = scmutil.origpath(ui, repo, rel)
3090 3094 ui.note(_('saving current version of %s as %s\n') %
3091 3095 (rel, bakname))
3092 3096 if not opts.get('dry_run'):
3093 3097 if interactive:
3094 3098 util.copyfile(target, bakname)
3095 3099 else:
3096 3100 util.rename(target, bakname)
3097 3101 if ui.verbose or not exact:
3098 3102 if not isinstance(msg, basestring):
3099 3103 msg = msg(abs)
3100 3104 ui.status(msg % rel)
3101 3105 elif exact:
3102 3106 ui.warn(msg % rel)
3103 3107 break
3104 3108
3105 3109 if not opts.get('dry_run'):
3106 3110 needdata = ('revert', 'add', 'undelete')
3107 3111 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3108 3112 _performrevert(repo, parents, ctx, actions, interactive)
3109 3113
3110 3114 if targetsubs:
3111 3115 # Revert the subrepos on the revert list
3112 3116 for sub in targetsubs:
3113 3117 try:
3114 3118 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3115 3119 except KeyError:
3116 3120 raise error.Abort("subrepository '%s' does not exist in %s!"
3117 3121 % (sub, short(ctx.node())))
3118 3122
3119 3123 def _revertprefetch(repo, ctx, *files):
3120 3124 """Let extension changing the storage layer prefetch content"""
3121 3125 pass
3122 3126
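# An illustrative sketch (not part of cmdutil) of how an extension might hook
# the _revertprefetch() stub above to fetch file contents before revert
# touches the working directory; extensions.wrapfunction() is the standard
# wrapping helper, while the prefetch helper named below is assumed:
#
#     from mercurial import cmdutil, extensions
#
#     def _prefetchwrapper(orig, repo, ctx, *files):
#         for flist in files:
#             myprefetch(repo, ctx, flist)  # hypothetical storage-layer helper
#         return orig(repo, ctx, *files)
#
#     def extsetup(ui):
#         extensions.wrapfunction(cmdutil, '_revertprefetch', _prefetchwrapper)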
3123 3127 def _performrevert(repo, parents, ctx, actions, interactive=False):
3124 3128 """function that actually perform all the actions computed for revert
3125 3129
3126 3130 This is an independent function to let extensions plug in and react to
3127 3131 the imminent revert.
3128 3132
3129 3133 Make sure you have the working directory locked when calling this function.
3130 3134 """
3131 3135 parent, p2 = parents
3132 3136 node = ctx.node()
3133 3137 excluded_files = []
3134 3138 matcher_opts = {"exclude": excluded_files}
3135 3139
3136 3140 def checkout(f):
3137 3141 fc = ctx[f]
3138 3142 repo.wwrite(f, fc.data(), fc.flags())
3139 3143
3140 3144 audit_path = pathutil.pathauditor(repo.root)
3141 3145 for f in actions['forget'][0]:
3142 3146 if interactive:
3143 3147 choice = \
3144 3148 repo.ui.promptchoice(
3145 3149 _("forget added file %s (yn)?$$ &Yes $$ &No")
3146 3150 % f)
3147 3151 if choice == 0:
3148 3152 repo.dirstate.drop(f)
3149 3153 else:
3150 3154 excluded_files.append(repo.wjoin(f))
3151 3155 else:
3152 3156 repo.dirstate.drop(f)
3153 3157 for f in actions['remove'][0]:
3154 3158 audit_path(f)
3155 3159 try:
3156 3160 util.unlinkpath(repo.wjoin(f))
3157 3161 except OSError:
3158 3162 pass
3159 3163 repo.dirstate.remove(f)
3160 3164 for f in actions['drop'][0]:
3161 3165 audit_path(f)
3162 3166 repo.dirstate.remove(f)
3163 3167
3164 3168 normal = None
3165 3169 if node == parent:
3166 3170 # We're reverting to our parent. If possible, we'd like status
3167 3171 # to report the file as clean. We have to use normallookup for
3168 3172 # merges to avoid losing information about merged/dirty files.
3169 3173 if p2 != nullid:
3170 3174 normal = repo.dirstate.normallookup
3171 3175 else:
3172 3176 normal = repo.dirstate.normal
3173 3177
3174 3178 newlyaddedandmodifiedfiles = set()
3175 3179 if interactive:
3176 3180 # Prompt the user for changes to revert
3177 3181 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3178 3182 m = scmutil.match(ctx, torevert, matcher_opts)
3179 3183 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3180 3184 diffopts.nodates = True
3181 3185 diffopts.git = True
3182 3186 reversehunks = repo.ui.configbool('experimental',
3183 3187 'revertalternateinteractivemode',
3184 3188 True)
3185 3189 if reversehunks:
3186 3190 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3187 3191 else:
3188 3192 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3189 3193 originalchunks = patch.parsepatch(diff)
3190 3194
3191 3195 try:
3192 3196
3193 3197 chunks, opts = recordfilter(repo.ui, originalchunks)
3194 3198 if reversehunks:
3195 3199 chunks = patch.reversehunks(chunks)
3196 3200
3197 3201 except patch.PatchError as err:
3198 3202 raise error.Abort(_('error parsing patch: %s') % err)
3199 3203
3200 3204 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3201 3205 # Apply changes
3202 3206 fp = cStringIO.StringIO()
3203 3207 for c in chunks:
3204 3208 c.write(fp)
3205 3209 dopatch = fp.tell()
3206 3210 fp.seek(0)
3207 3211 if dopatch:
3208 3212 try:
3209 3213 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3210 3214 except patch.PatchError as err:
3211 3215 raise error.Abort(str(err))
3212 3216 del fp
3213 3217 else:
3214 3218 for f in actions['revert'][0]:
3215 3219 checkout(f)
3216 3220 if normal:
3217 3221 normal(f)
3218 3222
3219 3223 for f in actions['add'][0]:
3220 3224 # Don't checkout modified files; they are already created by the diff
3221 3225 if f not in newlyaddedandmodifiedfiles:
3222 3226 checkout(f)
3223 3227 repo.dirstate.add(f)
3224 3228
3225 3229 normal = repo.dirstate.normallookup
3226 3230 if node == parent and p2 == nullid:
3227 3231 normal = repo.dirstate.normal
3228 3232 for f in actions['undelete'][0]:
3229 3233 checkout(f)
3230 3234 normal(f)
3231 3235
3232 3236 copied = copies.pathcopies(repo[parent], ctx)
3233 3237
3234 3238 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3235 3239 if f in copied:
3236 3240 repo.dirstate.copy(copied[f], f)
3237 3241
3238 3242 def command(table):
3239 3243 """Returns a function object to be used as a decorator for making commands.
3240 3244
3241 3245 This function receives a command table as its argument. The table should
3242 3246 be a dict.
3243 3247
3244 3248 The returned function can be used as a decorator for adding commands
3245 3249 to that command table. This function accepts multiple arguments to define
3246 3250 a command.
3247 3251
3248 3252 The first argument is the command name.
3249 3253
3250 3254 The options argument is an iterable of tuples defining command arguments.
3251 3255 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3252 3256
3253 3257 The synopsis argument defines a short, one line summary of how to use the
3254 3258 command. This shows up in the help output.
3255 3259
3256 3260 The norepo argument defines whether the command does not require a
3257 3261 local repository. Most commands operate against a repository, thus the
3258 3262 default is False.
3259 3263
3260 3264 The optionalrepo argument defines whether the command optionally requires
3261 3265 a local repository.
3262 3266
3263 3267 The inferrepo argument defines whether to try to find a repository from the
3264 3268 command line arguments. If True, arguments will be examined for potential
3265 3269 repository locations. See ``findrepo()``. If a repository is found, it
3266 3270 will be used.
3267 3271 """
3268 3272 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3269 3273 inferrepo=False):
3270 3274 def decorator(func):
3271 3275 if synopsis:
3272 3276 table[name] = func, list(options), synopsis
3273 3277 else:
3274 3278 table[name] = func, list(options)
3275 3279
3276 3280 if norepo:
3277 3281 # Avoid import cycle.
3278 3282 import commands
3279 3283 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3280 3284
3281 3285 if optionalrepo:
3282 3286 import commands
3283 3287 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3284 3288
3285 3289 if inferrepo:
3286 3290 import commands
3287 3291 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3288 3292
3289 3293 return func
3290 3294 return decorator
3291 3295
3292 3296 return cmd
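# An illustrative sketch (not part of cmdutil) of how an extension typically
# uses the factory above; 'hello' and its option are made-up names:
#
#     cmdtable = {}
#     command = cmdutil.command(cmdtable)
#
#     @command('hello', [('g', 'greeting', 'hello', _('greeting to print'))],
#              _('hg hello [-g TEXT]'), norepo=True)
#     def hello(ui, **opts):
#         ui.write('%s\n' % opts['greeting'])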
3293 3297
3294 3298 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3295 3299 # commands.outgoing. "missing" is "missing" of the result of
3296 3300 # "findcommonoutgoing()"
3297 3301 outgoinghooks = util.hooks()
3298 3302
3299 3303 # a list of (ui, repo) functions called by commands.summary
3300 3304 summaryhooks = util.hooks()
3301 3305
3302 3306 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3303 3307 #
3304 3308 # functions should return tuple of booleans below, if 'changes' is None:
3305 3309 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3306 3310 #
3307 3311 # otherwise, 'changes' is a tuple of tuples below:
3308 3312 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3309 3313 # - (desturl, destbranch, destpeer, outgoing)
3310 3314 summaryremotehooks = util.hooks()
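# A hedged sketch of how an extension can register one of the hooks above;
# util.hooks objects expose an add(source, hook) method, and the 'myext'
# name and summaryhook function here are assumptions:
#
#     def summaryhook(ui, repo):
#         ui.note('myext: nothing pending\n')
#
#     def uisetup(ui):
#         cmdutil.summaryhooks.add('myext', summaryhook)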
3311 3315
3312 3316 # A list of state files kept by multistep operations like graft.
3313 3317 # Since graft cannot be aborted, it is considered 'clearable' by update.
3314 3318 # note: bisect is intentionally excluded
3315 3319 # (state file, clearable, allowcommit, error, hint)
3316 3320 unfinishedstates = [
3317 3321 ('graftstate', True, False, _('graft in progress'),
3318 3322 _("use 'hg graft --continue' or 'hg update' to abort")),
3319 3323 ('updatestate', True, False, _('last update was interrupted'),
3320 3324 _("use 'hg update' to get a consistent checkout"))
3321 3325 ]
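# Extensions implementing resumable operations append their own entries to
# unfinishedstates; a hypothetical example (names and wording are assumed):
#
#     cmdutil.unfinishedstates.append(
#         ('myopstate', False, False, _('myop in progress'),
#          _("use 'hg myop --continue' or 'hg myop --abort'")))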
3322 3326
3323 3327 def checkunfinished(repo, commit=False):
3324 3328 '''Look for an unfinished multistep operation, like graft, and abort
3325 3329 if found. It's probably good to check this right before
3326 3330 bailifchanged().
3327 3331 '''
3328 3332 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3329 3333 if commit and allowcommit:
3330 3334 continue
3331 3335 if repo.vfs.exists(f):
3332 3336 raise error.Abort(msg, hint=hint)
3333 3337
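# An illustrative sketch of the pattern suggested by the docstring above for a
# command that should not run with pending multistep state or local changes;
# bailifchanged() is the existing cmdutil helper, 'mycommand' is made up:
#
#     def mycommand(ui, repo, *pats, **opts):
#         cmdutil.checkunfinished(repo)
#         cmdutil.bailifchanged(repo)
#         ...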
3334 3338 def clearunfinished(repo):
3335 3339 '''Check for unfinished operations (as above), and clear the ones
3336 3340 that are clearable.
3337 3341 '''
3338 3342 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3339 3343 if not clearable and repo.vfs.exists(f):
3340 3344 raise error.Abort(msg, hint=hint)
3341 3345 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3342 3346 if clearable and repo.vfs.exists(f):
3343 3347 util.unlink(repo.join(f))
3344 3348
3345 3349 afterresolvedstates = [
3346 3350 ('graftstate',
3347 3351 _('hg graft --continue')),
3348 3352 ]
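# As with unfinishedstates, an extension can register the command needed to
# resume its own state file (hypothetical names):
#
#     cmdutil.afterresolvedstates.append(
#         ('myopstate', _('hg myop --continue')))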
3349 3353
3350 3354 def howtocontinue(repo):
3351 3355 '''Check for an unfinished operation and return the command to finish
3352 3356 it.
3353 3357
3354 3358 afterresolvedstates tuples define a .hg/{file} and the corresponding
3355 3359 command needed to finish it.
3356 3360
3357 3361 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3358 3362 a boolean.
3359 3363 '''
3360 3364 contmsg = _("continue: %s")
3361 3365 for f, msg in afterresolvedstates:
3362 3366 if repo.vfs.exists(f):
3363 3367 return contmsg % msg, True
3364 3368 workingctx = repo[None]
3365 3369 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3366 3370 for s in workingctx.substate)
3367 3371 if dirty:
3368 3372 return contmsg % _("hg commit"), False
3369 3373 return None, None
3370 3374
3371 3375 def checkafterresolved(repo):
3372 3376 '''Inform the user about the next action after completing hg resolve
3373 3377
3374 3378 If there's a matching afterresolvedstates entry, the message from
3375 3379 howtocontinue() is reported through repo.ui.warn.
3376 3380 
3377 3381 Otherwise, it is reported through repo.ui.note.
3378 3382 '''
3379 3383 msg, warning = howtocontinue(repo)
3380 3384 if msg is not None:
3381 3385 if warning:
3382 3386 repo.ui.warn("%s\n" % msg)
3383 3387 else:
3384 3388 repo.ui.note("%s\n" % msg)
3385 3389
3386 3390 def wrongtooltocontinue(repo, task):
3387 3391 '''Raise an abort suggesting how to properly continue if there is an
3388 3392 active task.
3389 3393
3390 3394 Uses howtocontinue() to find the active task.
3391 3395
3392 3396 If there's no active task, or the only suggestion would be 'hg commit'
3393 3397 (the repo.ui.note case), it does not offer a hint.
3394 3398 '''
3395 3399 after = howtocontinue(repo)
3396 3400 hint = None
3397 3401 if after[1]:
3398 3402 hint = after[0]
3399 3403 raise error.Abort(_('no %s in progress') % task, hint=hint)
3400 3404
3401 3405 class dirstateguard(object):
3402 3406 '''Restore dirstate at unexpected failure.
3403 3407
3404 3408 At construction time, this class does:
3405 3409
3406 3410 - write current ``repo.dirstate`` out, and
3407 3411 - save ``.hg/dirstate`` into the backup file
3408 3412
3409 3413 This restores ``.hg/dirstate`` from the backup file, if ``release()``
3410 3414 is invoked before ``close()``.
3411 3415
3412 3416 This just removes the backup file at ``close()`` before ``release()``.
3413 3417 '''
3414 3418
3415 3419 def __init__(self, repo, name):
3416 3420 self._repo = repo
3417 3421 self._suffix = '.backup.%s.%d' % (name, id(self))
3418 3422 repo.dirstate._savebackup(repo.currenttransaction(), self._suffix)
3419 3423 self._active = True
3420 3424 self._closed = False
3421 3425
3422 3426 def __del__(self):
3423 3427 if self._active: # still active
3424 3428 # this may occur, even if this class is used correctly:
3425 3429 # for example, releasing other resources like transaction
3426 3430 # may raise exception before ``dirstateguard.release`` in
3427 3431 # ``release(tr, ....)``.
3428 3432 self._abort()
3429 3433
3430 3434 def close(self):
3431 3435 if not self._active: # already inactivated
3432 3436 msg = (_("can't close already inactivated backup: dirstate%s")
3433 3437 % self._suffix)
3434 3438 raise error.Abort(msg)
3435 3439
3436 3440 self._repo.dirstate._clearbackup(self._repo.currenttransaction(),
3437 3441 self._suffix)
3438 3442 self._active = False
3439 3443 self._closed = True
3440 3444
3441 3445 def _abort(self):
3442 3446 self._repo.dirstate._restorebackup(self._repo.currenttransaction(),
3443 3447 self._suffix)
3444 3448 self._active = False
3445 3449
3446 3450 def release(self):
3447 3451 if not self._closed:
3448 3452 if not self._active: # already inactivated
3449 3453 msg = (_("can't release already inactivated backup:"
3450 3454 " dirstate%s")
3451 3455 % self._suffix)
3452 3456 raise error.Abort(msg)
3453 3457 self._abort()
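# A minimal usage sketch for dirstateguard, assuming the caller already holds
# the working directory lock; 'mycommand' is only used in the backup suffix:
#
#     dsguard = cmdutil.dirstateguard(repo, 'mycommand')
#     try:
#         # ... mutate the dirstate / working directory ...
#         dsguard.close()    # success: just discard the backup
#     finally:
#         dsguard.release()  # if close() was not reached, restore .hg/dirstate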