cmdutil: provide a way to report how to continue...
timeless - r28120:ed4d06f1 default
@@ -1,3410 +1,3453
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import formatter
18 18 import crecord as crecordmod
19 19 import lock as lockmod
20 20
21 21 def ishunk(x):
22 22 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
23 23 return isinstance(x, hunkclasses)
24 24
25 25 def newandmodified(chunks, originalchunks):
26 26 newlyaddedandmodifiedfiles = set()
27 27 for chunk in chunks:
28 28 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
29 29 originalchunks:
30 30 newlyaddedandmodifiedfiles.add(chunk.header.filename())
31 31 return newlyaddedandmodifiedfiles
32 32
33 33 def parsealiases(cmd):
34 34 return cmd.lstrip("^").split("|")
35 35
36 36 def setupwrapcolorwrite(ui):
37 37 # wrap ui.write so diff output can be labeled/colorized
38 38 def wrapwrite(orig, *args, **kw):
39 39 label = kw.pop('label', '')
40 40 for chunk, l in patch.difflabel(lambda: args):
41 41 orig(chunk, label=label + l)
42 42
43 43 oldwrite = ui.write
44 44 def wrap(*args, **kwargs):
45 45 return wrapwrite(oldwrite, *args, **kwargs)
46 46 setattr(ui, 'write', wrap)
47 47 return oldwrite
48 48
49 49 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
50 50 if usecurses:
51 51 if testfile:
52 52 recordfn = crecordmod.testdecorator(testfile,
53 53 crecordmod.testchunkselector)
54 54 else:
55 55 recordfn = crecordmod.chunkselector
56 56
57 57 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
58 58
59 59 else:
60 60 return patch.filterpatch(ui, originalhunks, operation)
61 61
62 62 def recordfilter(ui, originalhunks, operation=None):
63 63 """ Prompts the user to filter the originalhunks and return a list of
64 64 selected hunks.
65 65 *operation* is used for ui purposes to indicate to the user
66 66 what kind of filtering they are doing: reverting, committing, shelving, etc.
67 67 *operation* has to be a translated string.
68 68 """
69 69 usecurses = crecordmod.checkcurses(ui)
70 70 testfile = ui.config('experimental', 'crecordtest', None)
71 71 oldwrite = setupwrapcolorwrite(ui)
72 72 try:
73 73 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
74 74 testfile, operation)
75 75 finally:
76 76 ui.write = oldwrite
77 77 return newchunks, newopts
78 78
79 79 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
80 80 filterfn, *pats, **opts):
81 81 import merge as mergemod
82 82
83 83 if not ui.interactive():
84 84 if cmdsuggest:
85 85 msg = _('running non-interactively, use %s instead') % cmdsuggest
86 86 else:
87 87 msg = _('running non-interactively')
88 88 raise error.Abort(msg)
89 89
90 90 # make sure username is set before going interactive
91 91 if not opts.get('user'):
92 92 ui.username() # raise exception, username not provided
93 93
94 94 def recordfunc(ui, repo, message, match, opts):
95 95 """This is the generic record driver.
96 96
97 97 Its job is to interactively filter local changes, and
98 98 accordingly prepare the working directory into a state in which the
99 99 job can be delegated to a non-interactive commit command such as
100 100 'commit' or 'qrefresh'.
101 101
102 102 After the actual job is done by the non-interactive command, the
103 103 working directory is restored to its original state.
104 104
105 105 In the end we'll record interesting changes, and everything else
106 106 will be left in place, so the user can continue working.
107 107 """
108 108
109 109 checkunfinished(repo, commit=True)
110 110 merge = len(repo[None].parents()) > 1
111 111 if merge:
112 112 raise error.Abort(_('cannot partially commit a merge '
113 113 '(use "hg commit" instead)'))
114 114
115 115 status = repo.status(match=match)
116 116 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
117 117 diffopts.nodates = True
118 118 diffopts.git = True
119 119 diffopts.showfunc = True
120 120 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
121 121 originalchunks = patch.parsepatch(originaldiff)
122 122
123 123 # 1. filter patch, so we have an intending-to-apply subset of it
124 124 try:
125 125 chunks, newopts = filterfn(ui, originalchunks)
126 126 except patch.PatchError as err:
127 127 raise error.Abort(_('error parsing patch: %s') % err)
128 128 opts.update(newopts)
129 129
130 130 # We need to keep a backup of files that have been newly added and
131 131 # modified during the recording process because there is a previous
132 132 # version without the edit in the workdir
133 133 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
134 134 contenders = set()
135 135 for h in chunks:
136 136 try:
137 137 contenders.update(set(h.files()))
138 138 except AttributeError:
139 139 pass
140 140
141 141 changed = status.modified + status.added + status.removed
142 142 newfiles = [f for f in changed if f in contenders]
143 143 if not newfiles:
144 144 ui.status(_('no changes to record\n'))
145 145 return 0
146 146
147 147 modified = set(status.modified)
148 148
149 149 # 2. backup changed files, so we can restore them in the end
150 150
151 151 if backupall:
152 152 tobackup = changed
153 153 else:
154 154 tobackup = [f for f in newfiles if f in modified or f in \
155 155 newlyaddedandmodifiedfiles]
156 156 backups = {}
157 157 if tobackup:
158 158 backupdir = repo.join('record-backups')
159 159 try:
160 160 os.mkdir(backupdir)
161 161 except OSError as err:
162 162 if err.errno != errno.EEXIST:
163 163 raise
164 164 try:
165 165 # backup continues
166 166 for f in tobackup:
167 167 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
168 168 dir=backupdir)
169 169 os.close(fd)
170 170 ui.debug('backup %r as %r\n' % (f, tmpname))
171 171 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
172 172 backups[f] = tmpname
173 173
174 174 fp = cStringIO.StringIO()
175 175 for c in chunks:
176 176 fname = c.filename()
177 177 if fname in backups:
178 178 c.write(fp)
179 179 dopatch = fp.tell()
180 180 fp.seek(0)
181 181
182 182 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
183 183 # 3a. apply filtered patch to clean repo (clean)
184 184 if backups:
185 185 # Equivalent to hg.revert
186 186 m = scmutil.matchfiles(repo, backups.keys())
187 187 mergemod.update(repo, repo.dirstate.p1(),
188 188 False, True, matcher=m)
189 189
190 190 # 3b. (apply)
191 191 if dopatch:
192 192 try:
193 193 ui.debug('applying patch\n')
194 194 ui.debug(fp.getvalue())
195 195 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
196 196 except patch.PatchError as err:
197 197 raise error.Abort(str(err))
198 198 del fp
199 199
200 200 # 4. We prepared working directory according to filtered
201 201 # patch. Now is the time to delegate the job to
202 202 # commit/qrefresh or the like!
203 203
204 204 # Make all of the pathnames absolute.
205 205 newfiles = [repo.wjoin(nf) for nf in newfiles]
206 206 return commitfunc(ui, repo, *newfiles, **opts)
207 207 finally:
208 208 # 5. finally restore backed-up files
209 209 try:
210 210 dirstate = repo.dirstate
211 211 for realname, tmpname in backups.iteritems():
212 212 ui.debug('restoring %r to %r\n' % (tmpname, realname))
213 213
214 214 if dirstate[realname] == 'n':
215 215 # without normallookup, restoring timestamp
216 216 # may cause partially committed files
217 217 # to be treated as unmodified
218 218 dirstate.normallookup(realname)
219 219
220 220 # copystat=True here and above are a hack to trick any
221 221 # editors that have f open into thinking we haven't modified them.
222 222 #
223 223 # Also note that this is racy, as an editor could notice the
224 224 # file's mtime before we've finished writing it.
225 225 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
226 226 os.unlink(tmpname)
227 227 if tobackup:
228 228 os.rmdir(backupdir)
229 229 except OSError:
230 230 pass
231 231
232 232 def recordinwlock(ui, repo, message, match, opts):
233 233 with repo.wlock():
234 234 return recordfunc(ui, repo, message, match, opts)
235 235
236 236 return commit(ui, repo, recordinwlock, pats, opts)
237 237
238 238 def findpossible(cmd, table, strict=False):
239 239 """
240 240 Return cmd -> (aliases, command table entry)
241 241 for each matching command.
242 242 Return debug commands (or their aliases) only if no normal command matches.
243 243 """
244 244 choice = {}
245 245 debugchoice = {}
246 246
247 247 if cmd in table:
248 248 # short-circuit exact matches, "log" alias beats "^log|history"
249 249 keys = [cmd]
250 250 else:
251 251 keys = table.keys()
252 252
253 253 allcmds = []
254 254 for e in keys:
255 255 aliases = parsealiases(e)
256 256 allcmds.extend(aliases)
257 257 found = None
258 258 if cmd in aliases:
259 259 found = cmd
260 260 elif not strict:
261 261 for a in aliases:
262 262 if a.startswith(cmd):
263 263 found = a
264 264 break
265 265 if found is not None:
266 266 if aliases[0].startswith("debug") or found.startswith("debug"):
267 267 debugchoice[found] = (aliases, table[e])
268 268 else:
269 269 choice[found] = (aliases, table[e])
270 270
271 271 if not choice and debugchoice:
272 272 choice = debugchoice
273 273
274 274 return choice, allcmds
275 275
276 276 def findcmd(cmd, table, strict=True):
277 277 """Return (aliases, command table entry) for command string."""
278 278 choice, allcmds = findpossible(cmd, table, strict)
279 279
280 280 if cmd in choice:
281 281 return choice[cmd]
282 282
283 283 if len(choice) > 1:
284 284 clist = choice.keys()
285 285 clist.sort()
286 286 raise error.AmbiguousCommand(cmd, clist)
287 287
288 288 if choice:
289 289 return choice.values()[0]
290 290
291 291 raise error.UnknownCommand(cmd, allcmds)
292 292
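# [Editor's note] A minimal usage sketch (not part of cmdutil.py), assuming a
# command table with a hypothetical entry keyed "^status|st":
#
#     aliases, entry = findcmd('status', table)          # exact name
#     aliases, entry = findcmd('st', table)               # exact alias
#     aliases, entry = findcmd('stat', table, False)      # unambiguous prefix
#
# An ambiguous prefix raises error.AmbiguousCommand, and an unknown command
# raises error.UnknownCommand with the list of known command names.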
293 293 def findrepo(p):
294 294 while not os.path.isdir(os.path.join(p, ".hg")):
295 295 oldp, p = p, os.path.dirname(p)
296 296 if p == oldp:
297 297 return None
298 298
299 299 return p
300 300
301 301 def bailifchanged(repo, merge=True):
302 302 if merge and repo.dirstate.p2() != nullid:
303 303 raise error.Abort(_('outstanding uncommitted merge'))
304 304 modified, added, removed, deleted = repo.status()[:4]
305 305 if modified or added or removed or deleted:
306 306 raise error.Abort(_('uncommitted changes'))
307 307 ctx = repo[None]
308 308 for s in sorted(ctx.substate):
309 309 ctx.sub(s).bailifchanged()
310 310
311 311 def logmessage(ui, opts):
312 312 """ get the log message according to -m and -l option """
313 313 message = opts.get('message')
314 314 logfile = opts.get('logfile')
315 315
316 316 if message and logfile:
317 317 raise error.Abort(_('options --message and --logfile are mutually '
318 318 'exclusive'))
319 319 if not message and logfile:
320 320 try:
321 321 if logfile == '-':
322 322 message = ui.fin.read()
323 323 else:
324 324 message = '\n'.join(util.readfile(logfile).splitlines())
325 325 except IOError as inst:
326 326 raise error.Abort(_("can't read commit message '%s': %s") %
327 327 (logfile, inst.strerror))
328 328 return message
329 329
330 330 def mergeeditform(ctxorbool, baseformname):
331 331 """return appropriate editform name (referencing a committemplate)
332 332
333 333 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
334 334 a merge is being committed.
335 335
336 336 This returns baseformname with '.merge' appended if it is a merge,
337 337 otherwise '.normal' is appended.
338 338 """
339 339 if isinstance(ctxorbool, bool):
340 340 if ctxorbool:
341 341 return baseformname + ".merge"
342 342 elif 1 < len(ctxorbool.parents()):
343 343 return baseformname + ".merge"
344 344
345 345 return baseformname + ".normal"
346 346
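# [Editor's note] Example (not part of cmdutil.py): per the logic above,
# mergeeditform(True, 'import.normal') yields 'import.normal.merge' and
# mergeeditform(False, 'import.normal') yields 'import.normal.normal'; when a
# ctx is passed instead of a bool, '.merge' is used if it has two parents.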
347 347 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
348 348 editform='', **opts):
349 349 """get appropriate commit message editor according to '--edit' option
350 350
351 351 'finishdesc' is a function to be called with the edited commit message
352 352 (= 'description' of the new changeset) just after editing, but
353 353 before checking for emptiness. It should return the actual text to be
354 354 stored in history. This allows the description to be changed before
355 355 it is stored.
356 356
357 357 'extramsg' is an extra message to be shown in the editor instead of
358 358 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and
359 359 EOL are added automatically.
360 360
361 361 'editform' is a dot-separated list of names, to distinguish
362 362 the purpose of commit text editing.
363 363
364 364 'getcommiteditor' returns 'commitforceeditor' regardless of
365 365 'edit', if either 'finishdesc' or 'extramsg' is specified, because
366 366 they are specifically for use in MQ.
367 367 """
368 368 if edit or finishdesc or extramsg:
369 369 return lambda r, c, s: commitforceeditor(r, c, s,
370 370 finishdesc=finishdesc,
371 371 extramsg=extramsg,
372 372 editform=editform)
373 373 elif editform:
374 374 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
375 375 else:
376 376 return commiteditor
377 377
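# [Editor's note] A minimal usage sketch (not part of cmdutil.py): callers
# normally build the editor callback from their parsed options and pass it to
# repo.commit(); 'opts', 'message', 'user', 'date' and 'm' are assumed to come
# from the calling command:
#
#     editor = getcommiteditor(editform='commit.normal', **opts)
#     node = repo.commit(message, user, date, match=m, editor=editor)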
378 378 def loglimit(opts):
379 379 """get the log limit according to option -l/--limit"""
380 380 limit = opts.get('limit')
381 381 if limit:
382 382 try:
383 383 limit = int(limit)
384 384 except ValueError:
385 385 raise error.Abort(_('limit must be a positive integer'))
386 386 if limit <= 0:
387 387 raise error.Abort(_('limit must be positive'))
388 388 else:
389 389 limit = None
390 390 return limit
391 391
392 392 def makefilename(repo, pat, node, desc=None,
393 393 total=None, seqno=None, revwidth=None, pathname=None):
394 394 node_expander = {
395 395 'H': lambda: hex(node),
396 396 'R': lambda: str(repo.changelog.rev(node)),
397 397 'h': lambda: short(node),
398 398 'm': lambda: re.sub('[^\w]', '_', str(desc))
399 399 }
400 400 expander = {
401 401 '%': lambda: '%',
402 402 'b': lambda: os.path.basename(repo.root),
403 403 }
404 404
405 405 try:
406 406 if node:
407 407 expander.update(node_expander)
408 408 if node:
409 409 expander['r'] = (lambda:
410 410 str(repo.changelog.rev(node)).zfill(revwidth or 0))
411 411 if total is not None:
412 412 expander['N'] = lambda: str(total)
413 413 if seqno is not None:
414 414 expander['n'] = lambda: str(seqno)
415 415 if total is not None and seqno is not None:
416 416 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
417 417 if pathname is not None:
418 418 expander['s'] = lambda: os.path.basename(pathname)
419 419 expander['d'] = lambda: os.path.dirname(pathname) or '.'
420 420 expander['p'] = lambda: pathname
421 421
422 422 newname = []
423 423 patlen = len(pat)
424 424 i = 0
425 425 while i < patlen:
426 426 c = pat[i]
427 427 if c == '%':
428 428 i += 1
429 429 c = pat[i]
430 430 c = expander[c]()
431 431 newname.append(c)
432 432 i += 1
433 433 return ''.join(newname)
434 434 except KeyError as inst:
435 435 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
436 436 inst.args[0])
437 437
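# [Editor's note] Example (not part of cmdutil.py): with the default export
# template 'hg-%h.patch', '%h' expands to the short form of the node, so a
# node whose short hash is '1234567890ab' yields 'hg-1234567890ab.patch';
# '%r' zero-pads the revision number to 'revwidth' and '%n' pads 'seqno' to
# the width of 'total'.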
438 438 class _unclosablefile(object):
439 439 def __init__(self, fp):
440 440 self._fp = fp
441 441
442 442 def close(self):
443 443 pass
444 444
445 445 def __iter__(self):
446 446 return iter(self._fp)
447 447
448 448 def __getattr__(self, attr):
449 449 return getattr(self._fp, attr)
450 450
451 451 def makefileobj(repo, pat, node=None, desc=None, total=None,
452 452 seqno=None, revwidth=None, mode='wb', modemap=None,
453 453 pathname=None):
454 454
455 455 writable = mode not in ('r', 'rb')
456 456
457 457 if not pat or pat == '-':
458 458 if writable:
459 459 fp = repo.ui.fout
460 460 else:
461 461 fp = repo.ui.fin
462 462 return _unclosablefile(fp)
463 463 if util.safehasattr(pat, 'write') and writable:
464 464 return pat
465 465 if util.safehasattr(pat, 'read') and 'r' in mode:
466 466 return pat
467 467 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
468 468 if modemap is not None:
469 469 mode = modemap.get(fn, mode)
470 470 if mode == 'wb':
471 471 modemap[fn] = 'ab'
472 472 return open(fn, mode)
473 473
474 474 def openrevlog(repo, cmd, file_, opts):
475 475 """opens the changelog, manifest, a filelog or a given revlog"""
476 476 cl = opts['changelog']
477 477 mf = opts['manifest']
478 478 dir = opts['dir']
479 479 msg = None
480 480 if cl and mf:
481 481 msg = _('cannot specify --changelog and --manifest at the same time')
482 482 elif cl and dir:
483 483 msg = _('cannot specify --changelog and --dir at the same time')
484 484 elif cl or mf:
485 485 if file_:
486 486 msg = _('cannot specify filename with --changelog or --manifest')
487 487 elif not repo:
488 488 msg = _('cannot specify --changelog or --manifest or --dir '
489 489 'without a repository')
490 490 if msg:
491 491 raise error.Abort(msg)
492 492
493 493 r = None
494 494 if repo:
495 495 if cl:
496 496 r = repo.unfiltered().changelog
497 497 elif dir:
498 498 if 'treemanifest' not in repo.requirements:
499 499 raise error.Abort(_("--dir can only be used on repos with "
500 500 "treemanifest enabled"))
501 501 dirlog = repo.dirlog(file_)
502 502 if len(dirlog):
503 503 r = dirlog
504 504 elif mf:
505 505 r = repo.manifest
506 506 elif file_:
507 507 filelog = repo.file(file_)
508 508 if len(filelog):
509 509 r = filelog
510 510 if not r:
511 511 if not file_:
512 512 raise error.CommandError(cmd, _('invalid arguments'))
513 513 if not os.path.isfile(file_):
514 514 raise error.Abort(_("revlog '%s' not found") % file_)
515 515 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
516 516 file_[:-2] + ".i")
517 517 return r
518 518
519 519 def copy(ui, repo, pats, opts, rename=False):
520 520 # called with the repo lock held
521 521 #
522 522 # hgsep => pathname that uses "/" to separate directories
523 523 # ossep => pathname that uses os.sep to separate directories
524 524 cwd = repo.getcwd()
525 525 targets = {}
526 526 after = opts.get("after")
527 527 dryrun = opts.get("dry_run")
528 528 wctx = repo[None]
529 529
530 530 def walkpat(pat):
531 531 srcs = []
532 532 if after:
533 533 badstates = '?'
534 534 else:
535 535 badstates = '?r'
536 536 m = scmutil.match(repo[None], [pat], opts, globbed=True)
537 537 for abs in repo.walk(m):
538 538 state = repo.dirstate[abs]
539 539 rel = m.rel(abs)
540 540 exact = m.exact(abs)
541 541 if state in badstates:
542 542 if exact and state == '?':
543 543 ui.warn(_('%s: not copying - file is not managed\n') % rel)
544 544 if exact and state == 'r':
545 545 ui.warn(_('%s: not copying - file has been marked for'
546 546 ' remove\n') % rel)
547 547 continue
548 548 # abs: hgsep
549 549 # rel: ossep
550 550 srcs.append((abs, rel, exact))
551 551 return srcs
552 552
553 553 # abssrc: hgsep
554 554 # relsrc: ossep
555 555 # otarget: ossep
556 556 def copyfile(abssrc, relsrc, otarget, exact):
557 557 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
558 558 if '/' in abstarget:
559 559 # We cannot normalize abstarget itself, this would prevent
560 560 # case only renames, like a => A.
561 561 abspath, absname = abstarget.rsplit('/', 1)
562 562 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
563 563 reltarget = repo.pathto(abstarget, cwd)
564 564 target = repo.wjoin(abstarget)
565 565 src = repo.wjoin(abssrc)
566 566 state = repo.dirstate[abstarget]
567 567
568 568 scmutil.checkportable(ui, abstarget)
569 569
570 570 # check for collisions
571 571 prevsrc = targets.get(abstarget)
572 572 if prevsrc is not None:
573 573 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
574 574 (reltarget, repo.pathto(abssrc, cwd),
575 575 repo.pathto(prevsrc, cwd)))
576 576 return
577 577
578 578 # check for overwrites
579 579 exists = os.path.lexists(target)
580 580 samefile = False
581 581 if exists and abssrc != abstarget:
582 582 if (repo.dirstate.normalize(abssrc) ==
583 583 repo.dirstate.normalize(abstarget)):
584 584 if not rename:
585 585 ui.warn(_("%s: can't copy - same file\n") % reltarget)
586 586 return
587 587 exists = False
588 588 samefile = True
589 589
590 590 if not after and exists or after and state in 'mn':
591 591 if not opts['force']:
592 592 ui.warn(_('%s: not overwriting - file exists\n') %
593 593 reltarget)
594 594 return
595 595
596 596 if after:
597 597 if not exists:
598 598 if rename:
599 599 ui.warn(_('%s: not recording move - %s does not exist\n') %
600 600 (relsrc, reltarget))
601 601 else:
602 602 ui.warn(_('%s: not recording copy - %s does not exist\n') %
603 603 (relsrc, reltarget))
604 604 return
605 605 elif not dryrun:
606 606 try:
607 607 if exists:
608 608 os.unlink(target)
609 609 targetdir = os.path.dirname(target) or '.'
610 610 if not os.path.isdir(targetdir):
611 611 os.makedirs(targetdir)
612 612 if samefile:
613 613 tmp = target + "~hgrename"
614 614 os.rename(src, tmp)
615 615 os.rename(tmp, target)
616 616 else:
617 617 util.copyfile(src, target)
618 618 srcexists = True
619 619 except IOError as inst:
620 620 if inst.errno == errno.ENOENT:
621 621 ui.warn(_('%s: deleted in working directory\n') % relsrc)
622 622 srcexists = False
623 623 else:
624 624 ui.warn(_('%s: cannot copy - %s\n') %
625 625 (relsrc, inst.strerror))
626 626 return True # report a failure
627 627
628 628 if ui.verbose or not exact:
629 629 if rename:
630 630 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
631 631 else:
632 632 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
633 633
634 634 targets[abstarget] = abssrc
635 635
636 636 # fix up dirstate
637 637 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
638 638 dryrun=dryrun, cwd=cwd)
639 639 if rename and not dryrun:
640 640 if not after and srcexists and not samefile:
641 641 util.unlinkpath(repo.wjoin(abssrc))
642 642 wctx.forget([abssrc])
643 643
644 644 # pat: ossep
645 645 # dest ossep
646 646 # srcs: list of (hgsep, hgsep, ossep, bool)
647 647 # return: function that takes hgsep and returns ossep
648 648 def targetpathfn(pat, dest, srcs):
649 649 if os.path.isdir(pat):
650 650 abspfx = pathutil.canonpath(repo.root, cwd, pat)
651 651 abspfx = util.localpath(abspfx)
652 652 if destdirexists:
653 653 striplen = len(os.path.split(abspfx)[0])
654 654 else:
655 655 striplen = len(abspfx)
656 656 if striplen:
657 657 striplen += len(os.sep)
658 658 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
659 659 elif destdirexists:
660 660 res = lambda p: os.path.join(dest,
661 661 os.path.basename(util.localpath(p)))
662 662 else:
663 663 res = lambda p: dest
664 664 return res
665 665
666 666 # pat: ossep
667 667 # dest ossep
668 668 # srcs: list of (hgsep, hgsep, ossep, bool)
669 669 # return: function that takes hgsep and returns ossep
670 670 def targetpathafterfn(pat, dest, srcs):
671 671 if matchmod.patkind(pat):
672 672 # a mercurial pattern
673 673 res = lambda p: os.path.join(dest,
674 674 os.path.basename(util.localpath(p)))
675 675 else:
676 676 abspfx = pathutil.canonpath(repo.root, cwd, pat)
677 677 if len(abspfx) < len(srcs[0][0]):
678 678 # A directory. Either the target path contains the last
679 679 # component of the source path or it does not.
680 680 def evalpath(striplen):
681 681 score = 0
682 682 for s in srcs:
683 683 t = os.path.join(dest, util.localpath(s[0])[striplen:])
684 684 if os.path.lexists(t):
685 685 score += 1
686 686 return score
687 687
688 688 abspfx = util.localpath(abspfx)
689 689 striplen = len(abspfx)
690 690 if striplen:
691 691 striplen += len(os.sep)
692 692 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
693 693 score = evalpath(striplen)
694 694 striplen1 = len(os.path.split(abspfx)[0])
695 695 if striplen1:
696 696 striplen1 += len(os.sep)
697 697 if evalpath(striplen1) > score:
698 698 striplen = striplen1
699 699 res = lambda p: os.path.join(dest,
700 700 util.localpath(p)[striplen:])
701 701 else:
702 702 # a file
703 703 if destdirexists:
704 704 res = lambda p: os.path.join(dest,
705 705 os.path.basename(util.localpath(p)))
706 706 else:
707 707 res = lambda p: dest
708 708 return res
709 709
710 710 pats = scmutil.expandpats(pats)
711 711 if not pats:
712 712 raise error.Abort(_('no source or destination specified'))
713 713 if len(pats) == 1:
714 714 raise error.Abort(_('no destination specified'))
715 715 dest = pats.pop()
716 716 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
717 717 if not destdirexists:
718 718 if len(pats) > 1 or matchmod.patkind(pats[0]):
719 719 raise error.Abort(_('with multiple sources, destination must be an '
720 720 'existing directory'))
721 721 if util.endswithsep(dest):
722 722 raise error.Abort(_('destination %s is not a directory') % dest)
723 723
724 724 tfn = targetpathfn
725 725 if after:
726 726 tfn = targetpathafterfn
727 727 copylist = []
728 728 for pat in pats:
729 729 srcs = walkpat(pat)
730 730 if not srcs:
731 731 continue
732 732 copylist.append((tfn(pat, dest, srcs), srcs))
733 733 if not copylist:
734 734 raise error.Abort(_('no files to copy'))
735 735
736 736 errors = 0
737 737 for targetpath, srcs in copylist:
738 738 for abssrc, relsrc, exact in srcs:
739 739 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
740 740 errors += 1
741 741
742 742 if errors:
743 743 ui.warn(_('(consider using --after)\n'))
744 744
745 745 return errors != 0
746 746
747 747 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
748 748 runargs=None, appendpid=False):
749 749 '''Run a command as a service.'''
750 750
751 751 def writepid(pid):
752 752 if opts['pid_file']:
753 753 if appendpid:
754 754 mode = 'a'
755 755 else:
756 756 mode = 'w'
757 757 fp = open(opts['pid_file'], mode)
758 758 fp.write(str(pid) + '\n')
759 759 fp.close()
760 760
761 761 if opts['daemon'] and not opts['daemon_pipefds']:
762 762 # Signal child process startup with file removal
763 763 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
764 764 os.close(lockfd)
765 765 try:
766 766 if not runargs:
767 767 runargs = util.hgcmd() + sys.argv[1:]
768 768 runargs.append('--daemon-pipefds=%s' % lockpath)
769 769 # Don't pass --cwd to the child process, because we've already
770 770 # changed directory.
771 771 for i in xrange(1, len(runargs)):
772 772 if runargs[i].startswith('--cwd='):
773 773 del runargs[i]
774 774 break
775 775 elif runargs[i].startswith('--cwd'):
776 776 del runargs[i:i + 2]
777 777 break
778 778 def condfn():
779 779 return not os.path.exists(lockpath)
780 780 pid = util.rundetached(runargs, condfn)
781 781 if pid < 0:
782 782 raise error.Abort(_('child process failed to start'))
783 783 writepid(pid)
784 784 finally:
785 785 try:
786 786 os.unlink(lockpath)
787 787 except OSError as e:
788 788 if e.errno != errno.ENOENT:
789 789 raise
790 790 if parentfn:
791 791 return parentfn(pid)
792 792 else:
793 793 return
794 794
795 795 if initfn:
796 796 initfn()
797 797
798 798 if not opts['daemon']:
799 799 writepid(util.getpid())
800 800
801 801 if opts['daemon_pipefds']:
802 802 lockpath = opts['daemon_pipefds']
803 803 try:
804 804 os.setsid()
805 805 except AttributeError:
806 806 pass
807 807 os.unlink(lockpath)
808 808 util.hidewindow()
809 809 sys.stdout.flush()
810 810 sys.stderr.flush()
811 811
812 812 nullfd = os.open(os.devnull, os.O_RDWR)
813 813 logfilefd = nullfd
814 814 if logfile:
815 815 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
816 816 os.dup2(nullfd, 0)
817 817 os.dup2(logfilefd, 1)
818 818 os.dup2(logfilefd, 2)
819 819 if nullfd not in (0, 1, 2):
820 820 os.close(nullfd)
821 821 if logfile and logfilefd not in (0, 1, 2):
822 822 os.close(logfilefd)
823 823
824 824 if runfn:
825 825 return runfn()
826 826
827 827 ## facility to let extensions process additional data in an import patch
828 828 # list of identifiers to be executed in order
829 829 extrapreimport = [] # run before commit
830 830 extrapostimport = [] # run after commit
831 831 # mapping from identifier to actual import function
832 832 #
833 833 # 'preimport' functions are run before the commit is made and are given the
834 834 # following arguments:
835 835 # - repo: the localrepository instance,
836 836 # - patchdata: data extracted from the patch header (cf m.patch.patchheadermap),
837 837 # - extra: the future extra dictionary of the changeset, please mutate it,
838 838 # - opts: the import options.
839 839 # XXX ideally, we would just pass a ctx ready to be computed; that would allow
840 840 # mutation of the in-memory commit and more. Feel free to rework the code to get
841 841 # there.
842 842 extrapreimportmap = {}
843 843 # 'postimport' functions are run after the commit is made and are given the
844 844 # following argument:
845 845 # - ctx: the changectx created by import.
846 846 extrapostimportmap = {}
847 847
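# [Editor's note] A minimal registration sketch (not part of cmdutil.py): an
# extension could copy a hypothetical 'ticket' field from the parsed patch
# header into the changeset extras; the (repo, patchdata, extra, opts)
# signature is the one documented above:
#
#     def _copyticket(repo, patchdata, extra, opts):
#         if 'ticket' in patchdata:
#             extra['ticket'] = patchdata['ticket']
#
#     extrapreimport.append('ticket')
#     extrapreimportmap['ticket'] = _copyticket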
848 848 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
849 849 """Utility function used by commands.import to import a single patch
850 850
851 851 This function is explicitly defined here to help the evolve extension to
852 852 wrap this part of the import logic.
853 853
854 854 The API is currently a bit ugly because it is a simple code translation from
855 855 the import command. Feel free to make it better.
856 856
857 857 :hunk: a patch (as a binary string)
858 858 :parents: nodes that will be the parents of the created commit
859 859 :opts: the full dict of options passed to the import command
860 860 :msgs: list to save the commit message to.
861 861 (used in case we need to save it when failing)
862 862 :updatefunc: a function that updates a repo to a given node
863 863 updatefunc(<repo>, <node>)
864 864 """
865 865 # avoid cycle context -> subrepo -> cmdutil
866 866 import context
867 867 extractdata = patch.extract(ui, hunk)
868 868 tmpname = extractdata.get('filename')
869 869 message = extractdata.get('message')
870 870 user = opts.get('user') or extractdata.get('user')
871 871 date = opts.get('date') or extractdata.get('date')
872 872 branch = extractdata.get('branch')
873 873 nodeid = extractdata.get('nodeid')
874 874 p1 = extractdata.get('p1')
875 875 p2 = extractdata.get('p2')
876 876
877 877 nocommit = opts.get('no_commit')
878 878 importbranch = opts.get('import_branch')
879 879 update = not opts.get('bypass')
880 880 strip = opts["strip"]
881 881 prefix = opts["prefix"]
882 882 sim = float(opts.get('similarity') or 0)
883 883 if not tmpname:
884 884 return (None, None, False)
885 885
886 886 rejects = False
887 887
888 888 try:
889 889 cmdline_message = logmessage(ui, opts)
890 890 if cmdline_message:
891 891 # pickup the cmdline msg
892 892 message = cmdline_message
893 893 elif message:
894 894 # pickup the patch msg
895 895 message = message.strip()
896 896 else:
897 897 # launch the editor
898 898 message = None
899 899 ui.debug('message:\n%s\n' % message)
900 900
901 901 if len(parents) == 1:
902 902 parents.append(repo[nullid])
903 903 if opts.get('exact'):
904 904 if not nodeid or not p1:
905 905 raise error.Abort(_('not a Mercurial patch'))
906 906 p1 = repo[p1]
907 907 p2 = repo[p2 or nullid]
908 908 elif p2:
909 909 try:
910 910 p1 = repo[p1]
911 911 p2 = repo[p2]
912 912 # Without any options, consider p2 only if the
913 913 # patch is being applied on top of the recorded
914 914 # first parent.
915 915 if p1 != parents[0]:
916 916 p1 = parents[0]
917 917 p2 = repo[nullid]
918 918 except error.RepoError:
919 919 p1, p2 = parents
920 920 if p2.node() == nullid:
921 921 ui.warn(_("warning: import the patch as a normal revision\n"
922 922 "(use --exact to import the patch as a merge)\n"))
923 923 else:
924 924 p1, p2 = parents
925 925
926 926 n = None
927 927 if update:
928 928 if p1 != parents[0]:
929 929 updatefunc(repo, p1.node())
930 930 if p2 != parents[1]:
931 931 repo.setparents(p1.node(), p2.node())
932 932
933 933 if opts.get('exact') or importbranch:
934 934 repo.dirstate.setbranch(branch or 'default')
935 935
936 936 partial = opts.get('partial', False)
937 937 files = set()
938 938 try:
939 939 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
940 940 files=files, eolmode=None, similarity=sim / 100.0)
941 941 except patch.PatchError as e:
942 942 if not partial:
943 943 raise error.Abort(str(e))
944 944 if partial:
945 945 rejects = True
946 946
947 947 files = list(files)
948 948 if nocommit:
949 949 if message:
950 950 msgs.append(message)
951 951 else:
952 952 if opts.get('exact') or p2:
953 953 # If you got here, you either use --force and know what
954 954 # you are doing or used --exact or a merge patch while
955 955 # being updated to its first parent.
956 956 m = None
957 957 else:
958 958 m = scmutil.matchfiles(repo, files or [])
959 959 editform = mergeeditform(repo[None], 'import.normal')
960 960 if opts.get('exact'):
961 961 editor = None
962 962 else:
963 963 editor = getcommiteditor(editform=editform, **opts)
964 964 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
965 965 extra = {}
966 966 for idfunc in extrapreimport:
967 967 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
968 968 try:
969 969 if partial:
970 970 repo.ui.setconfig('ui', 'allowemptycommit', True)
971 971 n = repo.commit(message, user,
972 972 date, match=m,
973 973 editor=editor, extra=extra)
974 974 for idfunc in extrapostimport:
975 975 extrapostimportmap[idfunc](repo[n])
976 976 finally:
977 977 repo.ui.restoreconfig(allowemptyback)
978 978 else:
979 979 if opts.get('exact') or importbranch:
980 980 branch = branch or 'default'
981 981 else:
982 982 branch = p1.branch()
983 983 store = patch.filestore()
984 984 try:
985 985 files = set()
986 986 try:
987 987 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
988 988 files, eolmode=None)
989 989 except patch.PatchError as e:
990 990 raise error.Abort(str(e))
991 991 if opts.get('exact'):
992 992 editor = None
993 993 else:
994 994 editor = getcommiteditor(editform='import.bypass')
995 995 memctx = context.makememctx(repo, (p1.node(), p2.node()),
996 996 message,
997 997 user,
998 998 date,
999 999 branch, files, store,
1000 1000 editor=editor)
1001 1001 n = memctx.commit()
1002 1002 finally:
1003 1003 store.close()
1004 1004 if opts.get('exact') and nocommit:
1005 1005 # --exact with --no-commit is still useful in that it does merge
1006 1006 # and branch bits
1007 1007 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1008 1008 elif opts.get('exact') and hex(n) != nodeid:
1009 1009 raise error.Abort(_('patch is damaged or loses information'))
1010 1010 msg = _('applied to working directory')
1011 1011 if n:
1012 1012 # i18n: refers to a short changeset id
1013 1013 msg = _('created %s') % short(n)
1014 1014 return (msg, n, rejects)
1015 1015 finally:
1016 1016 os.unlink(tmpname)
1017 1017
1018 1018 # facility to let extensions include additional data in an exported patch
1019 1019 # list of identifiers to be executed in order
1020 1020 extraexport = []
1021 1021 # mapping from identifier to actual export function
1022 1022 # each function has to return a string to be added to the header, or None;
1023 1023 # it is given two arguments (sequencenumber, changectx)
1024 1024 extraexportmap = {}
1025 1025
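# [Editor's note] A minimal registration sketch (not part of cmdutil.py): an
# export hook receives (sequencenumber, changectx) and returns either a header
# line (written as '# <text>') or None to emit nothing:
#
#     def _patchindex(seqno, ctx):
#         return 'Patch-Index %d' % seqno
#
#     extraexport.append('patchindex')
#     extraexportmap['patchindex'] = _patchindex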
1026 1026 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1027 1027 opts=None, match=None):
1028 1028 '''export changesets as hg patches.'''
1029 1029
1030 1030 total = len(revs)
1031 1031 revwidth = max([len(str(rev)) for rev in revs])
1032 1032 filemode = {}
1033 1033
1034 1034 def single(rev, seqno, fp):
1035 1035 ctx = repo[rev]
1036 1036 node = ctx.node()
1037 1037 parents = [p.node() for p in ctx.parents() if p]
1038 1038 branch = ctx.branch()
1039 1039 if switch_parent:
1040 1040 parents.reverse()
1041 1041
1042 1042 if parents:
1043 1043 prev = parents[0]
1044 1044 else:
1045 1045 prev = nullid
1046 1046
1047 1047 shouldclose = False
1048 1048 if not fp and len(template) > 0:
1049 1049 desc_lines = ctx.description().rstrip().split('\n')
1050 1050 desc = desc_lines[0] #Commit always has a first line.
1051 1051 fp = makefileobj(repo, template, node, desc=desc, total=total,
1052 1052 seqno=seqno, revwidth=revwidth, mode='wb',
1053 1053 modemap=filemode)
1054 1054 shouldclose = True
1055 1055 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1056 1056 repo.ui.note("%s\n" % fp.name)
1057 1057
1058 1058 if not fp:
1059 1059 write = repo.ui.write
1060 1060 else:
1061 1061 def write(s, **kw):
1062 1062 fp.write(s)
1063 1063
1064 1064 write("# HG changeset patch\n")
1065 1065 write("# User %s\n" % ctx.user())
1066 1066 write("# Date %d %d\n" % ctx.date())
1067 1067 write("# %s\n" % util.datestr(ctx.date()))
1068 1068 if branch and branch != 'default':
1069 1069 write("# Branch %s\n" % branch)
1070 1070 write("# Node ID %s\n" % hex(node))
1071 1071 write("# Parent %s\n" % hex(prev))
1072 1072 if len(parents) > 1:
1073 1073 write("# Parent %s\n" % hex(parents[1]))
1074 1074
1075 1075 for headerid in extraexport:
1076 1076 header = extraexportmap[headerid](seqno, ctx)
1077 1077 if header is not None:
1078 1078 write('# %s\n' % header)
1079 1079 write(ctx.description().rstrip())
1080 1080 write("\n\n")
1081 1081
1082 1082 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1083 1083 write(chunk, label=label)
1084 1084
1085 1085 if shouldclose:
1086 1086 fp.close()
1087 1087
1088 1088 for seqno, rev in enumerate(revs):
1089 1089 single(rev, seqno + 1, fp)
1090 1090
1091 1091 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1092 1092 changes=None, stat=False, fp=None, prefix='',
1093 1093 root='', listsubrepos=False):
1094 1094 '''show diff or diffstat.'''
1095 1095 if fp is None:
1096 1096 write = ui.write
1097 1097 else:
1098 1098 def write(s, **kw):
1099 1099 fp.write(s)
1100 1100
1101 1101 if root:
1102 1102 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1103 1103 else:
1104 1104 relroot = ''
1105 1105 if relroot != '':
1106 1106 # XXX relative roots currently don't work if the root is within a
1107 1107 # subrepo
1108 1108 uirelroot = match.uipath(relroot)
1109 1109 relroot += '/'
1110 1110 for matchroot in match.files():
1111 1111 if not matchroot.startswith(relroot):
1112 1112 ui.warn(_('warning: %s not inside relative root %s\n') % (
1113 1113 match.uipath(matchroot), uirelroot))
1114 1114
1115 1115 if stat:
1116 1116 diffopts = diffopts.copy(context=0)
1117 1117 width = 80
1118 1118 if not ui.plain():
1119 1119 width = ui.termwidth()
1120 1120 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1121 1121 prefix=prefix, relroot=relroot)
1122 1122 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1123 1123 width=width,
1124 1124 git=diffopts.git):
1125 1125 write(chunk, label=label)
1126 1126 else:
1127 1127 for chunk, label in patch.diffui(repo, node1, node2, match,
1128 1128 changes, diffopts, prefix=prefix,
1129 1129 relroot=relroot):
1130 1130 write(chunk, label=label)
1131 1131
1132 1132 if listsubrepos:
1133 1133 ctx1 = repo[node1]
1134 1134 ctx2 = repo[node2]
1135 1135 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1136 1136 tempnode2 = node2
1137 1137 try:
1138 1138 if node2 is not None:
1139 1139 tempnode2 = ctx2.substate[subpath][1]
1140 1140 except KeyError:
1141 1141 # A subrepo that existed in node1 was deleted between node1 and
1142 1142 # node2 (inclusive). Thus, ctx2's substate won't contain that
1143 1143 # subpath. The best we can do is to ignore it.
1144 1144 tempnode2 = None
1145 1145 submatch = matchmod.subdirmatcher(subpath, match)
1146 1146 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1147 1147 stat=stat, fp=fp, prefix=prefix)
1148 1148
1149 1149 class changeset_printer(object):
1150 1150 '''show changeset information when templating not requested.'''
1151 1151
1152 1152 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1153 1153 self.ui = ui
1154 1154 self.repo = repo
1155 1155 self.buffered = buffered
1156 1156 self.matchfn = matchfn
1157 1157 self.diffopts = diffopts
1158 1158 self.header = {}
1159 1159 self.hunk = {}
1160 1160 self.lastheader = None
1161 1161 self.footer = None
1162 1162
1163 1163 def flush(self, ctx):
1164 1164 rev = ctx.rev()
1165 1165 if rev in self.header:
1166 1166 h = self.header[rev]
1167 1167 if h != self.lastheader:
1168 1168 self.lastheader = h
1169 1169 self.ui.write(h)
1170 1170 del self.header[rev]
1171 1171 if rev in self.hunk:
1172 1172 self.ui.write(self.hunk[rev])
1173 1173 del self.hunk[rev]
1174 1174 return 1
1175 1175 return 0
1176 1176
1177 1177 def close(self):
1178 1178 if self.footer:
1179 1179 self.ui.write(self.footer)
1180 1180
1181 1181 def show(self, ctx, copies=None, matchfn=None, **props):
1182 1182 if self.buffered:
1183 1183 self.ui.pushbuffer(labeled=True)
1184 1184 self._show(ctx, copies, matchfn, props)
1185 1185 self.hunk[ctx.rev()] = self.ui.popbuffer()
1186 1186 else:
1187 1187 self._show(ctx, copies, matchfn, props)
1188 1188
1189 1189 def _show(self, ctx, copies, matchfn, props):
1190 1190 '''show a single changeset or file revision'''
1191 1191 changenode = ctx.node()
1192 1192 rev = ctx.rev()
1193 1193 if self.ui.debugflag:
1194 1194 hexfunc = hex
1195 1195 else:
1196 1196 hexfunc = short
1197 1197 # as of now, wctx.node() and wctx.rev() return None, but we want to
1198 1198 # show the same values as {node} and {rev} templatekw
1199 1199 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1200 1200
1201 1201 if self.ui.quiet:
1202 1202 self.ui.write("%d:%s\n" % revnode, label='log.node')
1203 1203 return
1204 1204
1205 1205 date = util.datestr(ctx.date())
1206 1206
1207 1207 # i18n: column positioning for "hg log"
1208 1208 self.ui.write(_("changeset: %d:%s\n") % revnode,
1209 1209 label='log.changeset changeset.%s' % ctx.phasestr())
1210 1210
1211 1211 # branches are shown first before any other names due to backwards
1212 1212 # compatibility
1213 1213 branch = ctx.branch()
1214 1214 # don't show the default branch name
1215 1215 if branch != 'default':
1216 1216 # i18n: column positioning for "hg log"
1217 1217 self.ui.write(_("branch: %s\n") % branch,
1218 1218 label='log.branch')
1219 1219
1220 1220 for name, ns in self.repo.names.iteritems():
1221 1221 # branches has special logic already handled above, so here we just
1222 1222 # skip it
1223 1223 if name == 'branches':
1224 1224 continue
1225 1225 # we will use the templatename as the color name since those two
1226 1226 # should be the same
1227 1227 for name in ns.names(self.repo, changenode):
1228 1228 self.ui.write(ns.logfmt % name,
1229 1229 label='log.%s' % ns.colorname)
1230 1230 if self.ui.debugflag:
1231 1231 # i18n: column positioning for "hg log"
1232 1232 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1233 1233 label='log.phase')
1234 1234 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1235 1235 label = 'log.parent changeset.%s' % pctx.phasestr()
1236 1236 # i18n: column positioning for "hg log"
1237 1237 self.ui.write(_("parent: %d:%s\n")
1238 1238 % (pctx.rev(), hexfunc(pctx.node())),
1239 1239 label=label)
1240 1240
1241 1241 if self.ui.debugflag and rev is not None:
1242 1242 mnode = ctx.manifestnode()
1243 1243 # i18n: column positioning for "hg log"
1244 1244 self.ui.write(_("manifest: %d:%s\n") %
1245 1245 (self.repo.manifest.rev(mnode), hex(mnode)),
1246 1246 label='ui.debug log.manifest')
1247 1247 # i18n: column positioning for "hg log"
1248 1248 self.ui.write(_("user: %s\n") % ctx.user(),
1249 1249 label='log.user')
1250 1250 # i18n: column positioning for "hg log"
1251 1251 self.ui.write(_("date: %s\n") % date,
1252 1252 label='log.date')
1253 1253
1254 1254 if self.ui.debugflag:
1255 1255 files = ctx.p1().status(ctx)[:3]
1256 1256 for key, value in zip([# i18n: column positioning for "hg log"
1257 1257 _("files:"),
1258 1258 # i18n: column positioning for "hg log"
1259 1259 _("files+:"),
1260 1260 # i18n: column positioning for "hg log"
1261 1261 _("files-:")], files):
1262 1262 if value:
1263 1263 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1264 1264 label='ui.debug log.files')
1265 1265 elif ctx.files() and self.ui.verbose:
1266 1266 # i18n: column positioning for "hg log"
1267 1267 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1268 1268 label='ui.note log.files')
1269 1269 if copies and self.ui.verbose:
1270 1270 copies = ['%s (%s)' % c for c in copies]
1271 1271 # i18n: column positioning for "hg log"
1272 1272 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1273 1273 label='ui.note log.copies')
1274 1274
1275 1275 extra = ctx.extra()
1276 1276 if extra and self.ui.debugflag:
1277 1277 for key, value in sorted(extra.items()):
1278 1278 # i18n: column positioning for "hg log"
1279 1279 self.ui.write(_("extra: %s=%s\n")
1280 1280 % (key, value.encode('string_escape')),
1281 1281 label='ui.debug log.extra')
1282 1282
1283 1283 description = ctx.description().strip()
1284 1284 if description:
1285 1285 if self.ui.verbose:
1286 1286 self.ui.write(_("description:\n"),
1287 1287 label='ui.note log.description')
1288 1288 self.ui.write(description,
1289 1289 label='ui.note log.description')
1290 1290 self.ui.write("\n\n")
1291 1291 else:
1292 1292 # i18n: column positioning for "hg log"
1293 1293 self.ui.write(_("summary: %s\n") %
1294 1294 description.splitlines()[0],
1295 1295 label='log.summary')
1296 1296 self.ui.write("\n")
1297 1297
1298 1298 self.showpatch(ctx, matchfn)
1299 1299
1300 1300 def showpatch(self, ctx, matchfn):
1301 1301 if not matchfn:
1302 1302 matchfn = self.matchfn
1303 1303 if matchfn:
1304 1304 stat = self.diffopts.get('stat')
1305 1305 diff = self.diffopts.get('patch')
1306 1306 diffopts = patch.diffallopts(self.ui, self.diffopts)
1307 1307 node = ctx.node()
1308 1308 prev = ctx.p1().node()
1309 1309 if stat:
1310 1310 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1311 1311 match=matchfn, stat=True)
1312 1312 if diff:
1313 1313 if stat:
1314 1314 self.ui.write("\n")
1315 1315 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1316 1316 match=matchfn, stat=False)
1317 1317 self.ui.write("\n")
1318 1318
1319 1319 class jsonchangeset(changeset_printer):
1320 1320 '''format changeset information.'''
1321 1321
1322 1322 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1323 1323 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1324 1324 self.cache = {}
1325 1325 self._first = True
1326 1326
1327 1327 def close(self):
1328 1328 if not self._first:
1329 1329 self.ui.write("\n]\n")
1330 1330 else:
1331 1331 self.ui.write("[]\n")
1332 1332
1333 1333 def _show(self, ctx, copies, matchfn, props):
1334 1334 '''show a single changeset or file revision'''
1335 1335 rev = ctx.rev()
1336 1336 if rev is None:
1337 1337 jrev = jnode = 'null'
1338 1338 else:
1339 1339 jrev = str(rev)
1340 1340 jnode = '"%s"' % hex(ctx.node())
1341 1341 j = encoding.jsonescape
1342 1342
1343 1343 if self._first:
1344 1344 self.ui.write("[\n {")
1345 1345 self._first = False
1346 1346 else:
1347 1347 self.ui.write(",\n {")
1348 1348
1349 1349 if self.ui.quiet:
1350 1350 self.ui.write('\n "rev": %s' % jrev)
1351 1351 self.ui.write(',\n "node": %s' % jnode)
1352 1352 self.ui.write('\n }')
1353 1353 return
1354 1354
1355 1355 self.ui.write('\n "rev": %s' % jrev)
1356 1356 self.ui.write(',\n "node": %s' % jnode)
1357 1357 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1358 1358 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1359 1359 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1360 1360 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1361 1361 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1362 1362
1363 1363 self.ui.write(',\n "bookmarks": [%s]' %
1364 1364 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1365 1365 self.ui.write(',\n "tags": [%s]' %
1366 1366 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1367 1367 self.ui.write(',\n "parents": [%s]' %
1368 1368 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1369 1369
1370 1370 if self.ui.debugflag:
1371 1371 if rev is None:
1372 1372 jmanifestnode = 'null'
1373 1373 else:
1374 1374 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1375 1375 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1376 1376
1377 1377 self.ui.write(',\n "extra": {%s}' %
1378 1378 ", ".join('"%s": "%s"' % (j(k), j(v))
1379 1379 for k, v in ctx.extra().items()))
1380 1380
1381 1381 files = ctx.p1().status(ctx)
1382 1382 self.ui.write(',\n "modified": [%s]' %
1383 1383 ", ".join('"%s"' % j(f) for f in files[0]))
1384 1384 self.ui.write(',\n "added": [%s]' %
1385 1385 ", ".join('"%s"' % j(f) for f in files[1]))
1386 1386 self.ui.write(',\n "removed": [%s]' %
1387 1387 ", ".join('"%s"' % j(f) for f in files[2]))
1388 1388
1389 1389 elif self.ui.verbose:
1390 1390 self.ui.write(',\n "files": [%s]' %
1391 1391 ", ".join('"%s"' % j(f) for f in ctx.files()))
1392 1392
1393 1393 if copies:
1394 1394 self.ui.write(',\n "copies": {%s}' %
1395 1395 ", ".join('"%s": "%s"' % (j(k), j(v))
1396 1396 for k, v in copies))
1397 1397
1398 1398 matchfn = self.matchfn
1399 1399 if matchfn:
1400 1400 stat = self.diffopts.get('stat')
1401 1401 diff = self.diffopts.get('patch')
1402 1402 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1403 1403 node, prev = ctx.node(), ctx.p1().node()
1404 1404 if stat:
1405 1405 self.ui.pushbuffer()
1406 1406 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1407 1407 match=matchfn, stat=True)
1408 1408 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1409 1409 if diff:
1410 1410 self.ui.pushbuffer()
1411 1411 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1412 1412 match=matchfn, stat=False)
1413 1413 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1414 1414
1415 1415 self.ui.write("\n }")
1416 1416
1417 1417 class changeset_templater(changeset_printer):
1418 1418 '''format changeset information.'''
1419 1419
1420 1420 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1421 1421 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1422 1422 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1423 1423 defaulttempl = {
1424 1424 'parent': '{rev}:{node|formatnode} ',
1425 1425 'manifest': '{rev}:{node|formatnode}',
1426 1426 'file_copy': '{name} ({source})',
1427 1427 'extra': '{key}={value|stringescape}'
1428 1428 }
1429 1429 # filecopy is preserved for compatibility reasons
1430 1430 defaulttempl['filecopy'] = defaulttempl['file_copy']
1431 1431 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1432 1432 cache=defaulttempl)
1433 1433 if tmpl:
1434 1434 self.t.cache['changeset'] = tmpl
1435 1435
1436 1436 self.cache = {}
1437 1437
1438 1438 # find correct templates for current mode
1439 1439 tmplmodes = [
1440 1440 (True, None),
1441 1441 (self.ui.verbose, 'verbose'),
1442 1442 (self.ui.quiet, 'quiet'),
1443 1443 (self.ui.debugflag, 'debug'),
1444 1444 ]
1445 1445
1446 1446 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1447 1447 'docheader': '', 'docfooter': ''}
1448 1448 for mode, postfix in tmplmodes:
1449 1449 for t in self._parts:
1450 1450 cur = t
1451 1451 if postfix:
1452 1452 cur += "_" + postfix
1453 1453 if mode and cur in self.t:
1454 1454 self._parts[t] = cur
1455 1455
1456 1456 if self._parts['docheader']:
1457 1457 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1458 1458
1459 1459 def close(self):
1460 1460 if self._parts['docfooter']:
1461 1461 if not self.footer:
1462 1462 self.footer = ""
1463 1463 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1464 1464 return super(changeset_templater, self).close()
1465 1465
1466 1466 def _show(self, ctx, copies, matchfn, props):
1467 1467 '''show a single changeset or file revision'''
1468 1468 props = props.copy()
1469 1469 props.update(templatekw.keywords)
1470 1470 props['templ'] = self.t
1471 1471 props['ctx'] = ctx
1472 1472 props['repo'] = self.repo
1473 1473 props['revcache'] = {'copies': copies}
1474 1474 props['cache'] = self.cache
1475 1475
1476 1476 try:
1477 1477 # write header
1478 1478 if self._parts['header']:
1479 1479 h = templater.stringify(self.t(self._parts['header'], **props))
1480 1480 if self.buffered:
1481 1481 self.header[ctx.rev()] = h
1482 1482 else:
1483 1483 if self.lastheader != h:
1484 1484 self.lastheader = h
1485 1485 self.ui.write(h)
1486 1486
1487 1487 # write changeset metadata, then patch if requested
1488 1488 key = self._parts['changeset']
1489 1489 self.ui.write(templater.stringify(self.t(key, **props)))
1490 1490 self.showpatch(ctx, matchfn)
1491 1491
1492 1492 if self._parts['footer']:
1493 1493 if not self.footer:
1494 1494 self.footer = templater.stringify(
1495 1495 self.t(self._parts['footer'], **props))
1496 1496 except KeyError as inst:
1497 1497 msg = _("%s: no key named '%s'")
1498 1498 raise error.Abort(msg % (self.t.mapfile, inst.args[0]))
1499 1499 except SyntaxError as inst:
1500 1500 raise error.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1501 1501
1502 1502 def gettemplate(ui, tmpl, style):
1503 1503 """
1504 1504 Find the template matching the given template spec or style.
1505 1505 """
1506 1506
1507 1507 # ui settings
1508 1508 if not tmpl and not style: # template are stronger than style
1509 1509 tmpl = ui.config('ui', 'logtemplate')
1510 1510 if tmpl:
1511 1511 try:
1512 1512 tmpl = templater.unquotestring(tmpl)
1513 1513 except SyntaxError:
1514 1514 pass
1515 1515 return tmpl, None
1516 1516 else:
1517 1517 style = util.expandpath(ui.config('ui', 'style', ''))
1518 1518
1519 1519 if not tmpl and style:
1520 1520 mapfile = style
1521 1521 if not os.path.split(mapfile)[0]:
1522 1522 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1523 1523 or templater.templatepath(mapfile))
1524 1524 if mapname:
1525 1525 mapfile = mapname
1526 1526 return None, mapfile
1527 1527
1528 1528 if not tmpl:
1529 1529 return None, None
1530 1530
1531 1531 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1532 1532
1533 1533 def show_changeset(ui, repo, opts, buffered=False):
1534 1534 """show one changeset using template or regular display.
1535 1535
1536 1536 Display format will be the first non-empty hit of:
1537 1537 1. option 'template'
1538 1538 2. option 'style'
1539 1539 3. [ui] setting 'logtemplate'
1540 1540 4. [ui] setting 'style'
1541 1541 If all of these values are either unset or the empty string,
1542 1542 regular display via changeset_printer() is done.
1543 1543 """
1544 1544 # options
1545 1545 matchfn = None
1546 1546 if opts.get('patch') or opts.get('stat'):
1547 1547 matchfn = scmutil.matchall(repo)
1548 1548
1549 1549 if opts.get('template') == 'json':
1550 1550 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1551 1551
1552 1552 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1553 1553
1554 1554 if not tmpl and not mapfile:
1555 1555 return changeset_printer(ui, repo, matchfn, opts, buffered)
1556 1556
1557 1557 try:
1558 1558 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1559 1559 buffered)
1560 1560 except SyntaxError as inst:
1561 1561 raise error.Abort(inst.args[0])
1562 1562 return t
1563 1563
1564 1564 def showmarker(ui, marker):
1565 1565 """utility function to display an obsolescence marker in a readable way
1566 1566
1567 1567 To be used by debug functions."""
1568 1568 ui.write(hex(marker.precnode()))
1569 1569 for repl in marker.succnodes():
1570 1570 ui.write(' ')
1571 1571 ui.write(hex(repl))
1572 1572 ui.write(' %X ' % marker.flags())
1573 1573 parents = marker.parentnodes()
1574 1574 if parents is not None:
1575 1575 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1576 1576 ui.write('(%s) ' % util.datestr(marker.date()))
1577 1577 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1578 1578 sorted(marker.metadata().items())
1579 1579 if t[0] != 'date')))
1580 1580 ui.write('\n')
1581 1581
1582 1582 def finddate(ui, repo, date):
1583 1583 """Find the tipmost changeset that matches the given date spec"""
1584 1584
1585 1585 df = util.matchdate(date)
1586 1586 m = scmutil.matchall(repo)
1587 1587 results = {}
1588 1588
1589 1589 def prep(ctx, fns):
1590 1590 d = ctx.date()
1591 1591 if df(d[0]):
1592 1592 results[ctx.rev()] = d
1593 1593
1594 1594 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1595 1595 rev = ctx.rev()
1596 1596 if rev in results:
1597 1597 ui.status(_("found revision %s from %s\n") %
1598 1598 (rev, util.datestr(results[rev])))
1599 1599 return str(rev)
1600 1600
1601 1601 raise error.Abort(_("revision matching date not found"))
1602 1602
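# Usage note (illustrative, not part of the original module): finddate()
# backs date-based commands such as "hg update --date". The spec is parsed
# by util.matchdate, so ranges like "<2015-01-01" or ">2014-06-01" are
# accepted, e.g.:
#
#     rev = finddate(ui, repo, '<2015-01-01')  # rev number as a string, or abort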
1603 1603 def increasingwindows(windowsize=8, sizelimit=512):
1604 1604 while True:
1605 1605 yield windowsize
1606 1606 if windowsize < sizelimit:
1607 1607 windowsize *= 2
1608 1608
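# Illustrative sketch (not part of the original module): increasingwindows()
# doubles the window until it reaches `sizelimit` and then keeps yielding the
# cap, so history is scanned in geometrically growing chunks:
#
#     sizes = increasingwindows(windowsize=8, sizelimit=64)
#     [next(sizes) for _ in range(5)]  # -> [8, 16, 32, 64, 64]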
1609 1609 class FileWalkError(Exception):
1610 1610 pass
1611 1611
1612 1612 def walkfilerevs(repo, match, follow, revs, fncache):
1613 1613 '''Walks the file history for the matched files.
1614 1614
1615 1615 Returns the changeset revs that are involved in the file history.
1616 1616
1617 1617 Throws FileWalkError if the file history can't be walked using
1618 1618 filelogs alone.
1619 1619 '''
1620 1620 wanted = set()
1621 1621 copies = []
1622 1622 minrev, maxrev = min(revs), max(revs)
1623 1623 def filerevgen(filelog, last):
1624 1624 """
1625 1625 Only files, no patterns. Check the history of each file.
1626 1626
1627 1627 Examines filelog entries within the minrev..maxrev linkrev range.
1628 1628 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1629 1629 tuples in backwards order
1630 1630 """
1631 1631 cl_count = len(repo)
1632 1632 revs = []
1633 1633 for j in xrange(0, last + 1):
1634 1634 linkrev = filelog.linkrev(j)
1635 1635 if linkrev < minrev:
1636 1636 continue
1637 1637 # only yield revs for which we have the changelog; this can
1638 1638 # happen while doing "hg log" during a pull or commit
1639 1639 if linkrev >= cl_count:
1640 1640 break
1641 1641
1642 1642 parentlinkrevs = []
1643 1643 for p in filelog.parentrevs(j):
1644 1644 if p != nullrev:
1645 1645 parentlinkrevs.append(filelog.linkrev(p))
1646 1646 n = filelog.node(j)
1647 1647 revs.append((linkrev, parentlinkrevs,
1648 1648 follow and filelog.renamed(n)))
1649 1649
1650 1650 return reversed(revs)
1651 1651 def iterfiles():
1652 1652 pctx = repo['.']
1653 1653 for filename in match.files():
1654 1654 if follow:
1655 1655 if filename not in pctx:
1656 1656 raise error.Abort(_('cannot follow file not in parent '
1657 1657 'revision: "%s"') % filename)
1658 1658 yield filename, pctx[filename].filenode()
1659 1659 else:
1660 1660 yield filename, None
1661 1661 for filename_node in copies:
1662 1662 yield filename_node
1663 1663
1664 1664 for file_, node in iterfiles():
1665 1665 filelog = repo.file(file_)
1666 1666 if not len(filelog):
1667 1667 if node is None:
1668 1668 # A zero count may be a directory or deleted file, so
1669 1669 # try to find matching entries on the slow path.
1670 1670 if follow:
1671 1671 raise error.Abort(
1672 1672 _('cannot follow nonexistent file: "%s"') % file_)
1673 1673 raise FileWalkError("Cannot walk via filelog")
1674 1674 else:
1675 1675 continue
1676 1676
1677 1677 if node is None:
1678 1678 last = len(filelog) - 1
1679 1679 else:
1680 1680 last = filelog.rev(node)
1681 1681
1682 1682 # keep track of all ancestors of the file
1683 1683 ancestors = set([filelog.linkrev(last)])
1684 1684
1685 1685 # iterate from latest to oldest revision
1686 1686 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1687 1687 if not follow:
1688 1688 if rev > maxrev:
1689 1689 continue
1690 1690 else:
1691 1691 # Note that last might not be the first interesting
1692 1692 # rev to us:
1693 1693 # if the file has been changed after maxrev, we'll
1694 1694 # have linkrev(last) > maxrev, and we still need
1695 1695 # to explore the file graph
1696 1696 if rev not in ancestors:
1697 1697 continue
1698 1698 # XXX insert 1327 fix here
1699 1699 if flparentlinkrevs:
1700 1700 ancestors.update(flparentlinkrevs)
1701 1701
1702 1702 fncache.setdefault(rev, []).append(file_)
1703 1703 wanted.add(rev)
1704 1704 if copied:
1705 1705 copies.append(copied)
1706 1706
1707 1707 return wanted
1708 1708
1709 1709 class _followfilter(object):
1710 1710 def __init__(self, repo, onlyfirst=False):
1711 1711 self.repo = repo
1712 1712 self.startrev = nullrev
1713 1713 self.roots = set()
1714 1714 self.onlyfirst = onlyfirst
1715 1715
1716 1716 def match(self, rev):
1717 1717 def realparents(rev):
1718 1718 if self.onlyfirst:
1719 1719 return self.repo.changelog.parentrevs(rev)[0:1]
1720 1720 else:
1721 1721 return filter(lambda x: x != nullrev,
1722 1722 self.repo.changelog.parentrevs(rev))
1723 1723
1724 1724 if self.startrev == nullrev:
1725 1725 self.startrev = rev
1726 1726 return True
1727 1727
1728 1728 if rev > self.startrev:
1729 1729 # forward: all descendants
1730 1730 if not self.roots:
1731 1731 self.roots.add(self.startrev)
1732 1732 for parent in realparents(rev):
1733 1733 if parent in self.roots:
1734 1734 self.roots.add(rev)
1735 1735 return True
1736 1736 else:
1737 1737 # backwards: all parents
1738 1738 if not self.roots:
1739 1739 self.roots.update(realparents(self.startrev))
1740 1740 if rev in self.roots:
1741 1741 self.roots.remove(rev)
1742 1742 self.roots.update(realparents(rev))
1743 1743 return True
1744 1744
1745 1745 return False
1746 1746
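# Behaviour sketch (illustrative, not part of the original module): the first
# revision passed to _followfilter.match() becomes the starting point. Later
# calls with larger revs are accepted when a parent is already in `roots`
# (descendants), while smaller revs are accepted when they are in `roots`,
# which is then advanced to their parents (ancestors). It is meant to be fed
# revisions moving monotonically away from the starting point, e.g.:
#
#     ff = _followfilter(repo)
#     ancestors = [r for r in repo.revs('reverse(:.)') if ff.match(r)]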
1747 1747 def walkchangerevs(repo, match, opts, prepare):
1748 1748 '''Iterate over files and the revs in which they changed.
1749 1749
1750 1750 Callers most commonly need to iterate backwards over the history
1751 1751 in which they are interested. Doing so has awful (quadratic-looking)
1752 1752 performance, so we use iterators in a "windowed" way.
1753 1753
1754 1754 We walk a window of revisions in the desired order. Within the
1755 1755 window, we first walk forwards to gather data, then in the desired
1756 1756 order (usually backwards) to display it.
1757 1757
1758 1758 This function returns an iterator yielding contexts. Before
1759 1759 yielding each context, the iterator will first call the prepare
1760 1760 function on each context in the window in forward order.'''
1761 1761
1762 1762 follow = opts.get('follow') or opts.get('follow_first')
1763 1763 revs = _logrevs(repo, opts)
1764 1764 if not revs:
1765 1765 return []
1766 1766 wanted = set()
1767 1767 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1768 1768 opts.get('removed'))
1769 1769 fncache = {}
1770 1770 change = repo.changectx
1771 1771
1772 1772 # First step is to fill wanted, the set of revisions that we want to yield.
1773 1773 # When it does not induce extra cost, we also fill fncache for revisions in
1774 1774 # wanted: a cache of filenames that were changed (ctx.files()) and that
1775 1775 # match the file filtering conditions.
1776 1776
1777 1777 if match.always():
1778 1778 # No files, no patterns. Display all revs.
1779 1779 wanted = revs
1780 1780 elif not slowpath:
1781 1781 # We only have to read through the filelog to find wanted revisions
1782 1782
1783 1783 try:
1784 1784 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1785 1785 except FileWalkError:
1786 1786 slowpath = True
1787 1787
1788 1788 # We decided to fall back to the slowpath because at least one
1789 1789 # of the paths was not a file. Check to see if at least one of them
1790 1790 # existed in history, otherwise simply return
1791 1791 for path in match.files():
1792 1792 if path == '.' or path in repo.store:
1793 1793 break
1794 1794 else:
1795 1795 return []
1796 1796
1797 1797 if slowpath:
1798 1798 # We have to read the changelog to match filenames against
1799 1799 # changed files
1800 1800
1801 1801 if follow:
1802 1802 raise error.Abort(_('can only follow copies/renames for explicit '
1803 1803 'filenames'))
1804 1804
1805 1805 # The slow path checks files modified in every changeset.
1806 1806 # This is really slow on large repos, so compute the set lazily.
1807 1807 class lazywantedset(object):
1808 1808 def __init__(self):
1809 1809 self.set = set()
1810 1810 self.revs = set(revs)
1811 1811
1812 1812 # No need to worry about locality here because it will be accessed
1813 1813 # in the same order as the increasing window below.
1814 1814 def __contains__(self, value):
1815 1815 if value in self.set:
1816 1816 return True
1817 1817 elif value not in self.revs:
1818 1818 return False
1819 1819 else:
1820 1820 self.revs.discard(value)
1821 1821 ctx = change(value)
1822 1822 matches = filter(match, ctx.files())
1823 1823 if matches:
1824 1824 fncache[value] = matches
1825 1825 self.set.add(value)
1826 1826 return True
1827 1827 return False
1828 1828
1829 1829 def discard(self, value):
1830 1830 self.revs.discard(value)
1831 1831 self.set.discard(value)
1832 1832
1833 1833 wanted = lazywantedset()
1834 1834
1835 1835 # it might be worthwhile to do this in the iterator if the rev range
1836 1836 # is descending and the prune args are all within that range
1837 1837 for rev in opts.get('prune', ()):
1838 1838 rev = repo[rev].rev()
1839 1839 ff = _followfilter(repo)
1840 1840 stop = min(revs[0], revs[-1])
1841 1841 for x in xrange(rev, stop - 1, -1):
1842 1842 if ff.match(x):
1843 1843 wanted = wanted - [x]
1844 1844
1845 1845 # Now that wanted is correctly initialized, we can iterate over the
1846 1846 # revision range, yielding only revisions in wanted.
1847 1847 def iterate():
1848 1848 if follow and match.always():
1849 1849 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1850 1850 def want(rev):
1851 1851 return ff.match(rev) and rev in wanted
1852 1852 else:
1853 1853 def want(rev):
1854 1854 return rev in wanted
1855 1855
1856 1856 it = iter(revs)
1857 1857 stopiteration = False
1858 1858 for windowsize in increasingwindows():
1859 1859 nrevs = []
1860 1860 for i in xrange(windowsize):
1861 1861 rev = next(it, None)
1862 1862 if rev is None:
1863 1863 stopiteration = True
1864 1864 break
1865 1865 elif want(rev):
1866 1866 nrevs.append(rev)
1867 1867 for rev in sorted(nrevs):
1868 1868 fns = fncache.get(rev)
1869 1869 ctx = change(rev)
1870 1870 if not fns:
1871 1871 def fns_generator():
1872 1872 for f in ctx.files():
1873 1873 if match(f):
1874 1874 yield f
1875 1875 fns = fns_generator()
1876 1876 prepare(ctx, fns)
1877 1877 for rev in nrevs:
1878 1878 yield change(rev)
1879 1879
1880 1880 if stopiteration:
1881 1881 break
1882 1882
1883 1883 return iterate()
1884 1884
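# Usage sketch (illustrative, not part of the original module): callers hand
# walkchangerevs() a `prepare` callback that receives each context plus the
# matching filenames, then consume the returned iterator; finddate() above is
# the smallest in-tree example of the pattern:
#
#     def prep(ctx, fns):
#         pass  # gather per-revision data here
#     for ctx in walkchangerevs(repo, scmutil.matchall(repo), {'rev': None}, prep):
#         pass  # display phase, usually newest first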
1885 1885 def _makefollowlogfilematcher(repo, files, followfirst):
1886 1886 # When displaying a revision with --patch --follow FILE, we have
1887 1887 # to know which file of the revision must be diffed. With
1888 1888 # --follow, we want the names of the ancestors of FILE in the
1889 1889 # revision, stored in "fcache". "fcache" is populated by
1890 1890 # reproducing the graph traversal already done by --follow revset
1891 1891 # and relating linkrevs to file names (which is not "correct" but
1892 1892 # good enough).
1893 1893 fcache = {}
1894 1894 fcacheready = [False]
1895 1895 pctx = repo['.']
1896 1896
1897 1897 def populate():
1898 1898 for fn in files:
1899 1899 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1900 1900 for c in i:
1901 1901 fcache.setdefault(c.linkrev(), set()).add(c.path())
1902 1902
1903 1903 def filematcher(rev):
1904 1904 if not fcacheready[0]:
1905 1905 # Lazy initialization
1906 1906 fcacheready[0] = True
1907 1907 populate()
1908 1908 return scmutil.matchfiles(repo, fcache.get(rev, []))
1909 1909
1910 1910 return filematcher
1911 1911
1912 1912 def _makenofollowlogfilematcher(repo, pats, opts):
1913 1913 '''hook for extensions to override the filematcher for non-follow cases'''
1914 1914 return None
1915 1915
1916 1916 def _makelogrevset(repo, pats, opts, revs):
1917 1917 """Return (expr, filematcher) where expr is a revset string built
1918 1918 from log options and file patterns or None. If --stat or --patch
1919 1919 are not passed filematcher is None. Otherwise it is a callable
1920 1920 taking a revision number and returning a match objects filtering
1921 1921 the files to be detailed when displaying the revision.
1922 1922 """
1923 1923 opt2revset = {
1924 1924 'no_merges': ('not merge()', None),
1925 1925 'only_merges': ('merge()', None),
1926 1926 '_ancestors': ('ancestors(%(val)s)', None),
1927 1927 '_fancestors': ('_firstancestors(%(val)s)', None),
1928 1928 '_descendants': ('descendants(%(val)s)', None),
1929 1929 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1930 1930 '_matchfiles': ('_matchfiles(%(val)s)', None),
1931 1931 'date': ('date(%(val)r)', None),
1932 1932 'branch': ('branch(%(val)r)', ' or '),
1933 1933 '_patslog': ('filelog(%(val)r)', ' or '),
1934 1934 '_patsfollow': ('follow(%(val)r)', ' or '),
1935 1935 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1936 1936 'keyword': ('keyword(%(val)r)', ' or '),
1937 1937 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1938 1938 'user': ('user(%(val)r)', ' or '),
1939 1939 }
1940 1940
1941 1941 opts = dict(opts)
1942 1942 # follow or not follow?
1943 1943 follow = opts.get('follow') or opts.get('follow_first')
1944 1944 if opts.get('follow_first'):
1945 1945 followfirst = 1
1946 1946 else:
1947 1947 followfirst = 0
1948 1948 # --follow with FILE behavior depends on revs...
1949 1949 it = iter(revs)
1950 1950 startrev = it.next()
1951 1951 followdescendants = startrev < next(it, startrev)
1952 1952
1953 1953 # branch and only_branch are really aliases and must be handled at
1954 1954 # the same time
1955 1955 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1956 1956 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1957 1957 # pats/include/exclude are passed to match.match() directly in
1958 1958 # _matchfiles() revset but walkchangerevs() builds its matcher with
1959 1959 # scmutil.match(). The difference is input pats are globbed on
1960 1960 # platforms without shell expansion (windows).
1961 1961 wctx = repo[None]
1962 1962 match, pats = scmutil.matchandpats(wctx, pats, opts)
1963 1963 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1964 1964 opts.get('removed'))
1965 1965 if not slowpath:
1966 1966 for f in match.files():
1967 1967 if follow and f not in wctx:
1968 1968 # If the file exists, it may be a directory, so let it
1969 1969 # take the slow path.
1970 1970 if os.path.exists(repo.wjoin(f)):
1971 1971 slowpath = True
1972 1972 continue
1973 1973 else:
1974 1974 raise error.Abort(_('cannot follow file not in parent '
1975 1975 'revision: "%s"') % f)
1976 1976 filelog = repo.file(f)
1977 1977 if not filelog:
1978 1978 # A zero count may be a directory or deleted file, so
1979 1979 # try to find matching entries on the slow path.
1980 1980 if follow:
1981 1981 raise error.Abort(
1982 1982 _('cannot follow nonexistent file: "%s"') % f)
1983 1983 slowpath = True
1984 1984
1985 1985 # We decided to fall back to the slowpath because at least one
1986 1986 # of the paths was not a file. Check to see if at least one of them
1987 1987 # existed in history - in that case, we'll continue down the
1988 1988 # slowpath; otherwise, we can turn off the slowpath
1989 1989 if slowpath:
1990 1990 for path in match.files():
1991 1991 if path == '.' or path in repo.store:
1992 1992 break
1993 1993 else:
1994 1994 slowpath = False
1995 1995
1996 1996 fpats = ('_patsfollow', '_patsfollowfirst')
1997 1997 fnopats = (('_ancestors', '_fancestors'),
1998 1998 ('_descendants', '_fdescendants'))
1999 1999 if slowpath:
2000 2000 # See walkchangerevs() slow path.
2001 2001 #
2002 2002 # pats/include/exclude cannot be represented as separate
2003 2003 # revset expressions as their filtering logic applies at file
2004 2004 # level. For instance "-I a -X b" matches a revision touching
2005 2005 # "a" and "b" while "file(a) and not file(b)" does
2006 2006 # not. Besides, filesets are evaluated against the working
2007 2007 # directory.
2008 2008 matchargs = ['r:', 'd:relpath']
2009 2009 for p in pats:
2010 2010 matchargs.append('p:' + p)
2011 2011 for p in opts.get('include', []):
2012 2012 matchargs.append('i:' + p)
2013 2013 for p in opts.get('exclude', []):
2014 2014 matchargs.append('x:' + p)
2015 2015 matchargs = ','.join(('%r' % p) for p in matchargs)
2016 2016 opts['_matchfiles'] = matchargs
2017 2017 if follow:
2018 2018 opts[fnopats[0][followfirst]] = '.'
2019 2019 else:
2020 2020 if follow:
2021 2021 if pats:
2022 2022 # follow() revset interprets its file argument as a
2023 2023 # manifest entry, so use match.files(), not pats.
2024 2024 opts[fpats[followfirst]] = list(match.files())
2025 2025 else:
2026 2026 op = fnopats[followdescendants][followfirst]
2027 2027 opts[op] = 'rev(%d)' % startrev
2028 2028 else:
2029 2029 opts['_patslog'] = list(pats)
2030 2030
2031 2031 filematcher = None
2032 2032 if opts.get('patch') or opts.get('stat'):
2033 2033 # When following files, track renames via a special matcher.
2034 2034 # If we're forced to take the slowpath it means we're following
2035 2035 # at least one pattern/directory, so don't bother with rename tracking.
2036 2036 if follow and not match.always() and not slowpath:
2037 2037 # _makefollowlogfilematcher expects its files argument to be
2038 2038 # relative to the repo root, so use match.files(), not pats.
2039 2039 filematcher = _makefollowlogfilematcher(repo, match.files(),
2040 2040 followfirst)
2041 2041 else:
2042 2042 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2043 2043 if filematcher is None:
2044 2044 filematcher = lambda rev: match
2045 2045
2046 2046 expr = []
2047 2047 for op, val in sorted(opts.iteritems()):
2048 2048 if not val:
2049 2049 continue
2050 2050 if op not in opt2revset:
2051 2051 continue
2052 2052 revop, andor = opt2revset[op]
2053 2053 if '%(val)' not in revop:
2054 2054 expr.append(revop)
2055 2055 else:
2056 2056 if not isinstance(val, list):
2057 2057 e = revop % {'val': val}
2058 2058 else:
2059 2059 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2060 2060 expr.append(e)
2061 2061
2062 2062 if expr:
2063 2063 expr = '(' + ' and '.join(expr) + ')'
2064 2064 else:
2065 2065 expr = None
2066 2066 return expr, filematcher
2067 2067
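# Illustrative example (not part of the original module): with no file
# patterns, "hg log -u alice -k bug" roughly yields the option dict
# {'user': ['alice'], 'keyword': ['bug']}, which the loop above renders as
#
#     ((keyword('bug')) and (user('alice')))
#
# each option goes through opt2revset, list-valued options are joined with
# their own operator ("or" here), and the pieces are combined with "and".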
2068 2068 def _logrevs(repo, opts):
2069 2069 # Default --rev value depends on --follow but --follow behavior
2070 2070 # depends on revisions resolved from --rev...
2071 2071 follow = opts.get('follow') or opts.get('follow_first')
2072 2072 if opts.get('rev'):
2073 2073 revs = scmutil.revrange(repo, opts['rev'])
2074 2074 elif follow and repo.dirstate.p1() == nullid:
2075 2075 revs = revset.baseset()
2076 2076 elif follow:
2077 2077 revs = repo.revs('reverse(:.)')
2078 2078 else:
2079 2079 revs = revset.spanset(repo)
2080 2080 revs.reverse()
2081 2081 return revs
2082 2082
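# Illustrative summary (not part of the original module) of the defaults
# chosen by _logrevs():
#
#     _logrevs(repo, {})                # every revision, newest first
#     _logrevs(repo, {'follow': True})  # reverse(:.), ancestors of the wd parent
#     _logrevs(repo, {'rev': ['2:5']})  # exactly the user-supplied range
#
# --follow with no checked-out parent yields an empty set.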
2083 2083 def getgraphlogrevs(repo, pats, opts):
2084 2084 """Return (revs, expr, filematcher) where revs is an iterable of
2085 2085 revision numbers, expr is a revset string built from log options
2086 2086 and file patterns or None, and used to filter 'revs'. If --stat or
2087 2087 --patch are not passed filematcher is None. Otherwise it is a
2088 2088 callable taking a revision number and returning a match objects
2089 2089 filtering the files to be detailed when displaying the revision.
2090 2090 """
2091 2091 limit = loglimit(opts)
2092 2092 revs = _logrevs(repo, opts)
2093 2093 if not revs:
2094 2094 return revset.baseset(), None, None
2095 2095 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2096 2096 if opts.get('rev'):
2097 2097 # User-specified revs might be unsorted, but don't sort before
2098 2098 # _makelogrevset because it might depend on the order of revs
2099 2099 revs.sort(reverse=True)
2100 2100 if expr:
2101 2101 # Revset matchers often operate faster on revisions in changelog
2102 2102 # order, because most filters deal with the changelog.
2103 2103 revs.reverse()
2104 2104 matcher = revset.match(repo.ui, expr)
2105 2105 # Revset matches can reorder revisions. "A or B" typically returns
2106 2106 # the revision matching A then the revision matching B. Sort
2107 2107 # again to fix that.
2108 2108 revs = matcher(repo, revs)
2109 2109 revs.sort(reverse=True)
2110 2110 if limit is not None:
2111 2111 limitedrevs = []
2112 2112 for idx, rev in enumerate(revs):
2113 2113 if idx >= limit:
2114 2114 break
2115 2115 limitedrevs.append(rev)
2116 2116 revs = revset.baseset(limitedrevs)
2117 2117
2118 2118 return revs, expr, filematcher
2119 2119
2120 2120 def getlogrevs(repo, pats, opts):
2121 2121 """Return (revs, expr, filematcher) where revs is an iterable of
2122 2122 revision numbers, expr is a revset string built from log options
2123 2123 and file patterns or None, and used to filter 'revs'. If --stat or
2124 2124 --patch are not passed filematcher is None. Otherwise it is a
2125 2125 callable taking a revision number and returning a match objects
2126 2126 filtering the files to be detailed when displaying the revision.
2127 2127 """
2128 2128 limit = loglimit(opts)
2129 2129 revs = _logrevs(repo, opts)
2130 2130 if not revs:
2131 2131 return revset.baseset([]), None, None
2132 2132 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2133 2133 if expr:
2134 2134 # Revset matchers often operate faster on revisions in changelog
2135 2135 # order, because most filters deal with the changelog.
2136 2136 if not opts.get('rev'):
2137 2137 revs.reverse()
2138 2138 matcher = revset.match(repo.ui, expr)
2139 2139 # Revset matches can reorder revisions. "A or B" typically returns
2140 2140 # the revision matching A then the revision matching B. Sort
2141 2141 # again to fix that.
2142 2142 revs = matcher(repo, revs)
2143 2143 if not opts.get('rev'):
2144 2144 revs.sort(reverse=True)
2145 2145 if limit is not None:
2146 2146 limitedrevs = []
2147 2147 for idx, r in enumerate(revs):
2148 2148 if limit <= idx:
2149 2149 break
2150 2150 limitedrevs.append(r)
2151 2151 revs = revset.baseset(limitedrevs)
2152 2152
2153 2153 return revs, expr, filematcher
2154 2154
2155 2155 def _graphnodeformatter(ui, displayer):
2156 2156 spec = ui.config('ui', 'graphnodetemplate')
2157 2157 if not spec:
2158 2158 return templatekw.showgraphnode # fast path for "{graphnode}"
2159 2159
2160 2160 templ = formatter.gettemplater(ui, 'graphnode', spec)
2161 2161 cache = {}
2162 2162 if isinstance(displayer, changeset_templater):
2163 2163 cache = displayer.cache # reuse cache of slow templates
2164 2164 props = templatekw.keywords.copy()
2165 2165 props['templ'] = templ
2166 2166 props['cache'] = cache
2167 2167 def formatnode(repo, ctx):
2168 2168 props['ctx'] = ctx
2169 2169 props['repo'] = repo
2170 2170 props['revcache'] = {}
2171 2171 return templater.stringify(templ('graphnode', **props))
2172 2172 return formatnode
2173 2173
2174 2174 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2175 2175 filematcher=None):
2176 2176 formatnode = _graphnodeformatter(ui, displayer)
2177 2177 seen, state = [], graphmod.asciistate()
2178 2178 for rev, type, ctx, parents in dag:
2179 2179 char = formatnode(repo, ctx)
2180 2180 copies = None
2181 2181 if getrenamed and ctx.rev():
2182 2182 copies = []
2183 2183 for fn in ctx.files():
2184 2184 rename = getrenamed(fn, ctx.rev())
2185 2185 if rename:
2186 2186 copies.append((fn, rename[0]))
2187 2187 revmatchfn = None
2188 2188 if filematcher is not None:
2189 2189 revmatchfn = filematcher(ctx.rev())
2190 2190 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2191 2191 lines = displayer.hunk.pop(rev).split('\n')
2192 2192 if not lines[-1]:
2193 2193 del lines[-1]
2194 2194 displayer.flush(ctx)
2195 2195 edges = edgefn(type, char, lines, seen, rev, parents)
2196 2196 for type, char, lines, coldata in edges:
2197 2197 graphmod.ascii(ui, state, type, char, lines, coldata)
2198 2198 displayer.close()
2199 2199
2200 2200 def graphlog(ui, repo, *pats, **opts):
2201 2201 # Parameters are identical to log command ones
2202 2202 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2203 2203 revdag = graphmod.dagwalker(repo, revs)
2204 2204
2205 2205 getrenamed = None
2206 2206 if opts.get('copies'):
2207 2207 endrev = None
2208 2208 if opts.get('rev'):
2209 2209 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2210 2210 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2211 2211 displayer = show_changeset(ui, repo, opts, buffered=True)
2212 2212 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2213 2213 filematcher)
2214 2214
2215 2215 def checkunsupportedgraphflags(pats, opts):
2216 2216 for op in ["newest_first"]:
2217 2217 if op in opts and opts[op]:
2218 2218 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2219 2219 % op.replace("_", "-"))
2220 2220
2221 2221 def graphrevs(repo, nodes, opts):
2222 2222 limit = loglimit(opts)
2223 2223 nodes.reverse()
2224 2224 if limit is not None:
2225 2225 nodes = nodes[:limit]
2226 2226 return graphmod.nodes(repo, nodes)
2227 2227
2228 2228 def add(ui, repo, match, prefix, explicitonly, **opts):
2229 2229 join = lambda f: os.path.join(prefix, f)
2230 2230 bad = []
2231 2231
2232 2232 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2233 2233 names = []
2234 2234 wctx = repo[None]
2235 2235 cca = None
2236 2236 abort, warn = scmutil.checkportabilityalert(ui)
2237 2237 if abort or warn:
2238 2238 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2239 2239
2240 2240 badmatch = matchmod.badmatch(match, badfn)
2241 2241 dirstate = repo.dirstate
2242 2242 # We don't want to just call wctx.walk here, since it would return a lot of
2243 2243 # clean files, which we aren't interested in, and it takes time.
2244 2244 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2245 2245 True, False, full=False)):
2246 2246 exact = match.exact(f)
2247 2247 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2248 2248 if cca:
2249 2249 cca(f)
2250 2250 names.append(f)
2251 2251 if ui.verbose or not exact:
2252 2252 ui.status(_('adding %s\n') % match.rel(f))
2253 2253
2254 2254 for subpath in sorted(wctx.substate):
2255 2255 sub = wctx.sub(subpath)
2256 2256 try:
2257 2257 submatch = matchmod.subdirmatcher(subpath, match)
2258 2258 if opts.get('subrepos'):
2259 2259 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2260 2260 else:
2261 2261 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2262 2262 except error.LookupError:
2263 2263 ui.status(_("skipping missing subrepository: %s\n")
2264 2264 % join(subpath))
2265 2265
2266 2266 if not opts.get('dry_run'):
2267 2267 rejected = wctx.add(names, prefix)
2268 2268 bad.extend(f for f in rejected if f in match.files())
2269 2269 return bad
2270 2270
2271 2271 def forget(ui, repo, match, prefix, explicitonly):
2272 2272 join = lambda f: os.path.join(prefix, f)
2273 2273 bad = []
2274 2274 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2275 2275 wctx = repo[None]
2276 2276 forgot = []
2277 2277
2278 2278 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2279 2279 forget = sorted(s[0] + s[1] + s[3] + s[6])
2280 2280 if explicitonly:
2281 2281 forget = [f for f in forget if match.exact(f)]
2282 2282
2283 2283 for subpath in sorted(wctx.substate):
2284 2284 sub = wctx.sub(subpath)
2285 2285 try:
2286 2286 submatch = matchmod.subdirmatcher(subpath, match)
2287 2287 subbad, subforgot = sub.forget(submatch, prefix)
2288 2288 bad.extend([subpath + '/' + f for f in subbad])
2289 2289 forgot.extend([subpath + '/' + f for f in subforgot])
2290 2290 except error.LookupError:
2291 2291 ui.status(_("skipping missing subrepository: %s\n")
2292 2292 % join(subpath))
2293 2293
2294 2294 if not explicitonly:
2295 2295 for f in match.files():
2296 2296 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2297 2297 if f not in forgot:
2298 2298 if repo.wvfs.exists(f):
2299 2299 # Don't complain if the exact case match wasn't given.
2300 2300 # But don't do this until after checking 'forgot', so
2301 2301 # that subrepo files aren't normalized, and this op is
2302 2302 # purely from data cached by the status walk above.
2303 2303 if repo.dirstate.normalize(f) in repo.dirstate:
2304 2304 continue
2305 2305 ui.warn(_('not removing %s: '
2306 2306 'file is already untracked\n')
2307 2307 % match.rel(f))
2308 2308 bad.append(f)
2309 2309
2310 2310 for f in forget:
2311 2311 if ui.verbose or not match.exact(f):
2312 2312 ui.status(_('removing %s\n') % match.rel(f))
2313 2313
2314 2314 rejected = wctx.forget(forget, prefix)
2315 2315 bad.extend(f for f in rejected if f in match.files())
2316 2316 forgot.extend(f for f in forget if f not in rejected)
2317 2317 return bad, forgot
2318 2318
2319 2319 def files(ui, ctx, m, fm, fmt, subrepos):
2320 2320 rev = ctx.rev()
2321 2321 ret = 1
2322 2322 ds = ctx.repo().dirstate
2323 2323
2324 2324 for f in ctx.matches(m):
2325 2325 if rev is None and ds[f] == 'r':
2326 2326 continue
2327 2327 fm.startitem()
2328 2328 if ui.verbose:
2329 2329 fc = ctx[f]
2330 2330 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2331 2331 fm.data(abspath=f)
2332 2332 fm.write('path', fmt, m.rel(f))
2333 2333 ret = 0
2334 2334
2335 2335 for subpath in sorted(ctx.substate):
2336 2336 def matchessubrepo(subpath):
2337 2337 return (m.always() or m.exact(subpath)
2338 2338 or any(f.startswith(subpath + '/') for f in m.files()))
2339 2339
2340 2340 if subrepos or matchessubrepo(subpath):
2341 2341 sub = ctx.sub(subpath)
2342 2342 try:
2343 2343 submatch = matchmod.subdirmatcher(subpath, m)
2344 2344 if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
2345 2345 ret = 0
2346 2346 except error.LookupError:
2347 2347 ui.status(_("skipping missing subrepository: %s\n")
2348 2348 % m.abs(subpath))
2349 2349
2350 2350 return ret
2351 2351
2352 2352 def remove(ui, repo, m, prefix, after, force, subrepos):
2353 2353 join = lambda f: os.path.join(prefix, f)
2354 2354 ret = 0
2355 2355 s = repo.status(match=m, clean=True)
2356 2356 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2357 2357
2358 2358 wctx = repo[None]
2359 2359
2360 2360 for subpath in sorted(wctx.substate):
2361 2361 def matchessubrepo(matcher, subpath):
2362 2362 if matcher.exact(subpath):
2363 2363 return True
2364 2364 for f in matcher.files():
2365 2365 if f.startswith(subpath):
2366 2366 return True
2367 2367 return False
2368 2368
2369 2369 if subrepos or matchessubrepo(m, subpath):
2370 2370 sub = wctx.sub(subpath)
2371 2371 try:
2372 2372 submatch = matchmod.subdirmatcher(subpath, m)
2373 2373 if sub.removefiles(submatch, prefix, after, force, subrepos):
2374 2374 ret = 1
2375 2375 except error.LookupError:
2376 2376 ui.status(_("skipping missing subrepository: %s\n")
2377 2377 % join(subpath))
2378 2378
2379 2379 # warn about failure to delete explicit files/dirs
2380 2380 deleteddirs = util.dirs(deleted)
2381 2381 for f in m.files():
2382 2382 def insubrepo():
2383 2383 for subpath in wctx.substate:
2384 2384 if f.startswith(subpath):
2385 2385 return True
2386 2386 return False
2387 2387
2388 2388 isdir = f in deleteddirs or wctx.hasdir(f)
2389 2389 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2390 2390 continue
2391 2391
2392 2392 if repo.wvfs.exists(f):
2393 2393 if repo.wvfs.isdir(f):
2394 2394 ui.warn(_('not removing %s: no tracked files\n')
2395 2395 % m.rel(f))
2396 2396 else:
2397 2397 ui.warn(_('not removing %s: file is untracked\n')
2398 2398 % m.rel(f))
2399 2399 # missing files will generate a warning elsewhere
2400 2400 ret = 1
2401 2401
2402 2402 if force:
2403 2403 list = modified + deleted + clean + added
2404 2404 elif after:
2405 2405 list = deleted
2406 2406 for f in modified + added + clean:
2407 2407 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2408 2408 ret = 1
2409 2409 else:
2410 2410 list = deleted + clean
2411 2411 for f in modified:
2412 2412 ui.warn(_('not removing %s: file is modified (use -f'
2413 2413 ' to force removal)\n') % m.rel(f))
2414 2414 ret = 1
2415 2415 for f in added:
2416 2416 ui.warn(_('not removing %s: file has been marked for add'
2417 2417 ' (use forget to undo)\n') % m.rel(f))
2418 2418 ret = 1
2419 2419
2420 2420 for f in sorted(list):
2421 2421 if ui.verbose or not m.exact(f):
2422 2422 ui.status(_('removing %s\n') % m.rel(f))
2423 2423
2424 2424 with repo.wlock():
2425 2425 if not after:
2426 2426 for f in list:
2427 2427 if f in added:
2428 2428 continue # we never unlink added files on remove
2429 2429 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2430 2430 repo[None].forget(list)
2431 2431
2432 2432 return ret
2433 2433
2434 2434 def cat(ui, repo, ctx, matcher, prefix, **opts):
2435 2435 err = 1
2436 2436
2437 2437 def write(path):
2438 2438 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2439 2439 pathname=os.path.join(prefix, path))
2440 2440 data = ctx[path].data()
2441 2441 if opts.get('decode'):
2442 2442 data = repo.wwritedata(path, data)
2443 2443 fp.write(data)
2444 2444 fp.close()
2445 2445
2446 2446 # Automation often uses hg cat on single files, so special case it
2447 2447 # for performance to avoid the cost of parsing the manifest.
2448 2448 if len(matcher.files()) == 1 and not matcher.anypats():
2449 2449 file = matcher.files()[0]
2450 2450 mf = repo.manifest
2451 2451 mfnode = ctx.manifestnode()
2452 2452 if mfnode and mf.find(mfnode, file)[0]:
2453 2453 write(file)
2454 2454 return 0
2455 2455
2456 2456 # Don't warn about "missing" files that are really in subrepos
2457 2457 def badfn(path, msg):
2458 2458 for subpath in ctx.substate:
2459 2459 if path.startswith(subpath):
2460 2460 return
2461 2461 matcher.bad(path, msg)
2462 2462
2463 2463 for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
2464 2464 write(abs)
2465 2465 err = 0
2466 2466
2467 2467 for subpath in sorted(ctx.substate):
2468 2468 sub = ctx.sub(subpath)
2469 2469 try:
2470 2470 submatch = matchmod.subdirmatcher(subpath, matcher)
2471 2471
2472 2472 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2473 2473 **opts):
2474 2474 err = 0
2475 2475 except error.RepoLookupError:
2476 2476 ui.status(_("skipping missing subrepository: %s\n")
2477 2477 % os.path.join(prefix, subpath))
2478 2478
2479 2479 return err
2480 2480
2481 2481 def commit(ui, repo, commitfunc, pats, opts):
2482 2482 '''commit the specified files or all outstanding changes'''
2483 2483 date = opts.get('date')
2484 2484 if date:
2485 2485 opts['date'] = util.parsedate(date)
2486 2486 message = logmessage(ui, opts)
2487 2487 matcher = scmutil.match(repo[None], pats, opts)
2488 2488
2489 2489 # extract addremove carefully -- this function can be called from a command
2490 2490 # that doesn't support addremove
2491 2491 if opts.get('addremove'):
2492 2492 if scmutil.addremove(repo, matcher, "", opts) != 0:
2493 2493 raise error.Abort(
2494 2494 _("failed to mark all new/missing files as added/removed"))
2495 2495
2496 2496 return commitfunc(ui, repo, message, matcher, opts)
2497 2497
2498 2498 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2499 2499 # avoid cycle context -> subrepo -> cmdutil
2500 2500 import context
2501 2501
2502 2502 # amend will reuse the existing user if not specified, but the obsolete
2503 2503 # marker creation requires that the current user's name is specified.
2504 2504 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2505 2505 ui.username() # raise exception if username not set
2506 2506
2507 2507 ui.note(_('amending changeset %s\n') % old)
2508 2508 base = old.p1()
2509 2509 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2510 2510
2511 2511 wlock = lock = newid = None
2512 2512 try:
2513 2513 wlock = repo.wlock()
2514 2514 lock = repo.lock()
2515 2515 with repo.transaction('amend') as tr:
2516 2516 # See if we got a message from -m or -l, if not, open the editor
2517 2517 # with the message of the changeset to amend
2518 2518 message = logmessage(ui, opts)
2519 2519 # ensure logfile does not conflict with later enforcement of the
2520 2520 # message. potential logfile content has been processed by
2521 2521 # `logmessage` anyway.
2522 2522 opts.pop('logfile')
2523 2523 # First, do a regular commit to record all changes in the working
2524 2524 # directory (if there are any)
2525 2525 ui.callhooks = False
2526 2526 activebookmark = repo._bookmarks.active
2527 2527 try:
2528 2528 repo._bookmarks.active = None
2529 2529 opts['message'] = 'temporary amend commit for %s' % old
2530 2530 node = commit(ui, repo, commitfunc, pats, opts)
2531 2531 finally:
2532 2532 repo._bookmarks.active = activebookmark
2533 2533 repo._bookmarks.recordchange(tr)
2534 2534 ui.callhooks = True
2535 2535 ctx = repo[node]
2536 2536
2537 2537 # Participating changesets:
2538 2538 #
2539 2539 # node/ctx o - new (intermediate) commit that contains changes
2540 2540 # | from working dir to go into amending commit
2541 2541 # | (or a workingctx if there were no changes)
2542 2542 # |
2543 2543 # old o - changeset to amend
2544 2544 # |
2545 2545 # base o - parent of amending changeset
2546 2546
2547 2547 # Update extra dict from amended commit (e.g. to preserve graft
2548 2548 # source)
2549 2549 extra.update(old.extra())
2550 2550
2551 2551 # Also update it from the intermediate commit or from the wctx
2552 2552 extra.update(ctx.extra())
2553 2553
2554 2554 if len(old.parents()) > 1:
2555 2555 # ctx.files() isn't reliable for merges, so fall back to the
2556 2556 # slower repo.status() method
2557 2557 files = set([fn for st in repo.status(base, old)[:3]
2558 2558 for fn in st])
2559 2559 else:
2560 2560 files = set(old.files())
2561 2561
2562 2562 # Second, we use either the commit we just did, or, if there were no
2563 2563 # changes, the parent of the working directory as the version of the
2564 2564 # files in the final amend commit
2565 2565 if node:
2566 2566 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2567 2567
2568 2568 user = ctx.user()
2569 2569 date = ctx.date()
2570 2570 # Recompute copies (avoid recording a -> b -> a)
2571 2571 copied = copies.pathcopies(base, ctx)
2572 2572 if old.p2():
2573 2573 copied.update(copies.pathcopies(old.p2(), ctx))
2574 2574
2575 2575 # Prune files which were reverted by the updates: if old
2576 2576 # introduced file X and our intermediate commit, node,
2577 2577 # reverted that file, then those two files are the same and
2578 2578 # we can discard X from our list of files. Likewise if X
2579 2579 # was deleted, it's no longer relevant
2580 2580 files.update(ctx.files())
2581 2581
2582 2582 def samefile(f):
2583 2583 if f in ctx.manifest():
2584 2584 a = ctx.filectx(f)
2585 2585 if f in base.manifest():
2586 2586 b = base.filectx(f)
2587 2587 return (not a.cmp(b)
2588 2588 and a.flags() == b.flags())
2589 2589 else:
2590 2590 return False
2591 2591 else:
2592 2592 return f not in base.manifest()
2593 2593 files = [f for f in files if not samefile(f)]
2594 2594
2595 2595 def filectxfn(repo, ctx_, path):
2596 2596 try:
2597 2597 fctx = ctx[path]
2598 2598 flags = fctx.flags()
2599 2599 mctx = context.memfilectx(repo,
2600 2600 fctx.path(), fctx.data(),
2601 2601 islink='l' in flags,
2602 2602 isexec='x' in flags,
2603 2603 copied=copied.get(path))
2604 2604 return mctx
2605 2605 except KeyError:
2606 2606 return None
2607 2607 else:
2608 2608 ui.note(_('copying changeset %s to %s\n') % (old, base))
2609 2609
2610 2610 # Use version of files as in the old cset
2611 2611 def filectxfn(repo, ctx_, path):
2612 2612 try:
2613 2613 return old.filectx(path)
2614 2614 except KeyError:
2615 2615 return None
2616 2616
2617 2617 user = opts.get('user') or old.user()
2618 2618 date = opts.get('date') or old.date()
2619 2619 editform = mergeeditform(old, 'commit.amend')
2620 2620 editor = getcommiteditor(editform=editform, **opts)
2621 2621 if not message:
2622 2622 editor = getcommiteditor(edit=True, editform=editform)
2623 2623 message = old.description()
2624 2624
2625 2625 pureextra = extra.copy()
2626 2626 extra['amend_source'] = old.hex()
2627 2627
2628 2628 new = context.memctx(repo,
2629 2629 parents=[base.node(), old.p2().node()],
2630 2630 text=message,
2631 2631 files=files,
2632 2632 filectxfn=filectxfn,
2633 2633 user=user,
2634 2634 date=date,
2635 2635 extra=extra,
2636 2636 editor=editor)
2637 2637
2638 2638 newdesc = changelog.stripdesc(new.description())
2639 2639 if ((not node)
2640 2640 and newdesc == old.description()
2641 2641 and user == old.user()
2642 2642 and date == old.date()
2643 2643 and pureextra == old.extra()):
2644 2644 # nothing changed. continuing here would create a new node
2645 2645 # anyway because of the amend_source noise.
2646 2646 #
2647 2647 # This is not what we expect from amend.
2648 2648 return old.node()
2649 2649
2650 2650 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2651 2651 try:
2652 2652 if opts.get('secret'):
2653 2653 commitphase = 'secret'
2654 2654 else:
2655 2655 commitphase = old.phase()
2656 2656 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2657 2657 newid = repo.commitctx(new)
2658 2658 finally:
2659 2659 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2660 2660 if newid != old.node():
2661 2661 # Reroute the working copy parent to the new changeset
2662 2662 repo.setparents(newid, nullid)
2663 2663
2664 2664 # Move bookmarks from old parent to amend commit
2665 2665 bms = repo.nodebookmarks(old.node())
2666 2666 if bms:
2667 2667 marks = repo._bookmarks
2668 2668 for bm in bms:
2669 2669 ui.debug('moving bookmarks %r from %s to %s\n' %
2670 2670 (marks, old.hex(), hex(newid)))
2671 2671 marks[bm] = newid
2672 2672 marks.recordchange(tr)
2673 2673 # commit the whole amend process
2674 2674 if createmarkers:
2675 2675 # mark the new changeset as successor of the rewritten one
2676 2676 new = repo[newid]
2677 2677 obs = [(old, (new,))]
2678 2678 if node:
2679 2679 obs.append((ctx, ()))
2680 2680
2681 2681 obsolete.createmarkers(repo, obs)
2682 2682 if not createmarkers and newid != old.node():
2683 2683 # Strip the intermediate commit (if there was one) and the amended
2684 2684 # commit
2685 2685 if node:
2686 2686 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2687 2687 ui.note(_('stripping amended changeset %s\n') % old)
2688 2688 repair.strip(ui, repo, old.node(), topic='amend-backup')
2689 2689 finally:
2690 2690 lockmod.release(lock, wlock)
2691 2691 return newid
2692 2692
2693 2693 def commiteditor(repo, ctx, subs, editform=''):
2694 2694 if ctx.description():
2695 2695 return ctx.description()
2696 2696 return commitforceeditor(repo, ctx, subs, editform=editform,
2697 2697 unchangedmessagedetection=True)
2698 2698
2699 2699 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2700 2700 editform='', unchangedmessagedetection=False):
2701 2701 if not extramsg:
2702 2702 extramsg = _("Leave message empty to abort commit.")
2703 2703
2704 2704 forms = [e for e in editform.split('.') if e]
2705 2705 forms.insert(0, 'changeset')
2706 2706 templatetext = None
2707 2707 while forms:
2708 2708 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2709 2709 if tmpl:
2710 2710 templatetext = committext = buildcommittemplate(
2711 2711 repo, ctx, subs, extramsg, tmpl)
2712 2712 break
2713 2713 forms.pop()
2714 2714 else:
2715 2715 committext = buildcommittext(repo, ctx, subs, extramsg)
2716 2716
2717 2717 # run editor in the repository root
2718 2718 olddir = os.getcwd()
2719 2719 os.chdir(repo.root)
2720 2720
2721 2721 # make in-memory changes visible to external process
2722 2722 tr = repo.currenttransaction()
2723 2723 repo.dirstate.write(tr)
2724 2724 pending = tr and tr.writepending() and repo.root
2725 2725
2726 2726 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2727 2727 editform=editform, pending=pending)
2728 2728 text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
2729 2729 os.chdir(olddir)
2730 2730
2731 2731 if finishdesc:
2732 2732 text = finishdesc(text)
2733 2733 if not text.strip():
2734 2734 raise error.Abort(_("empty commit message"))
2735 2735 if unchangedmessagedetection and editortext == templatetext:
2736 2736 raise error.Abort(_("commit message unchanged"))
2737 2737
2738 2738 return text
2739 2739
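# Configuration sketch (illustrative, not part of the original module): the
# "forms" loop in commitforceeditor() tries [committemplate] keys from the
# most specific editform down to plain "changeset", so with
#
#     [committemplate]
#     changeset = {desc}\n
#     changeset.commit.amend = {desc}\nHG: you are amending\n
#
# only "commit --amend" picks up the second, more specific template; the HG:
# guidance line is stripped from the final message like any other HG: line.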
2740 2740 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2741 2741 ui = repo.ui
2742 2742 tmpl, mapfile = gettemplate(ui, tmpl, None)
2743 2743
2744 2744 try:
2745 2745 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2746 2746 except SyntaxError as inst:
2747 2747 raise error.Abort(inst.args[0])
2748 2748
2749 2749 for k, v in repo.ui.configitems('committemplate'):
2750 2750 if k != 'changeset':
2751 2751 t.t.cache[k] = v
2752 2752
2753 2753 if not extramsg:
2754 2754 extramsg = '' # ensure that extramsg is string
2755 2755
2756 2756 ui.pushbuffer()
2757 2757 t.show(ctx, extramsg=extramsg)
2758 2758 return ui.popbuffer()
2759 2759
2760 2760 def hgprefix(msg):
2761 2761 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2762 2762
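# For example (illustrative): hgprefix("user: alice\n\nbranch merge") returns
# "HG: user: alice\nHG: branch merge" -- empty lines are dropped and every
# remaining line gains the "HG: " prefix that is stripped again after editing.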
2763 2763 def buildcommittext(repo, ctx, subs, extramsg):
2764 2764 edittext = []
2765 2765 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2766 2766 if ctx.description():
2767 2767 edittext.append(ctx.description())
2768 2768 edittext.append("")
2769 2769 edittext.append("") # Empty line between message and comments.
2770 2770 edittext.append(hgprefix(_("Enter commit message."
2771 2771 " Lines beginning with 'HG:' are removed.")))
2772 2772 edittext.append(hgprefix(extramsg))
2773 2773 edittext.append("HG: --")
2774 2774 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2775 2775 if ctx.p2():
2776 2776 edittext.append(hgprefix(_("branch merge")))
2777 2777 if ctx.branch():
2778 2778 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2779 2779 if bookmarks.isactivewdirparent(repo):
2780 2780 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2781 2781 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2782 2782 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2783 2783 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2784 2784 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2785 2785 if not added and not modified and not removed:
2786 2786 edittext.append(hgprefix(_("no files changed")))
2787 2787 edittext.append("")
2788 2788
2789 2789 return "\n".join(edittext)
2790 2790
2791 2791 def commitstatus(repo, node, branch, bheads=None, opts=None):
2792 2792 if opts is None:
2793 2793 opts = {}
2794 2794 ctx = repo[node]
2795 2795 parents = ctx.parents()
2796 2796
2797 2797 if (not opts.get('amend') and bheads and node not in bheads and not
2798 2798 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2799 2799 repo.ui.status(_('created new head\n'))
2800 2800 # The message is not printed for initial roots. For the other
2801 2801 # changesets, it is printed in the following situations:
2802 2802 #
2803 2803 # Par column: for the 2 parents with ...
2804 2804 # N: null or no parent
2805 2805 # B: parent is on another named branch
2806 2806 # C: parent is a regular non head changeset
2807 2807 # H: parent was a branch head of the current branch
2808 2808 # Msg column: whether we print "created new head" message
2809 2809 # In the following, it is assumed that there already exists some
2810 2810 # initial branch heads of the current branch, otherwise nothing is
2811 2811 # printed anyway.
2812 2812 #
2813 2813 # Par Msg Comment
2814 2814 # N N y additional topo root
2815 2815 #
2816 2816 # B N y additional branch root
2817 2817 # C N y additional topo head
2818 2818 # H N n usual case
2819 2819 #
2820 2820 # B B y weird additional branch root
2821 2821 # C B y branch merge
2822 2822 # H B n merge with named branch
2823 2823 #
2824 2824 # C C y additional head from merge
2825 2825 # C H n merge with a head
2826 2826 #
2827 2827 # H H n head merge: head count decreases
2828 2828
2829 2829 if not opts.get('close_branch'):
2830 2830 for r in parents:
2831 2831 if r.closesbranch() and r.branch() == branch:
2832 2832 repo.ui.status(_('reopening closed branch head %d\n') % r)
2833 2833
2834 2834 if repo.ui.debugflag:
2835 2835 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2836 2836 elif repo.ui.verbose:
2837 2837 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2838 2838
2839 2839 def postcommitstatus(repo, pats, opts):
2840 2840 return repo.status(match=scmutil.match(repo[None], pats, opts))
2841 2841
2842 2842 def revert(ui, repo, ctx, parents, *pats, **opts):
2843 2843 parent, p2 = parents
2844 2844 node = ctx.node()
2845 2845
2846 2846 mf = ctx.manifest()
2847 2847 if node == p2:
2848 2848 parent = p2
2849 2849 if node == parent:
2850 2850 pmf = mf
2851 2851 else:
2852 2852 pmf = None
2853 2853
2854 2854 # need all matching names in dirstate and manifest of target rev,
2855 2855 # so have to walk both. do not print errors if files exist in one
2856 2856 # but not the other. in both cases, filesets should be evaluated against
2857 2857 # workingctx to get consistent result (issue4497). this means 'set:**'
2858 2858 # cannot be used to select missing files from target rev.
2859 2859
2860 2860 # `names` is a mapping for all elements in working copy and target revision
2861 2861 # The mapping is in the form:
2862 2862 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2863 2863 names = {}
2864 2864
2865 2865 with repo.wlock():
2866 2866 ## filling of the `names` mapping
2867 2867 # walk dirstate to fill `names`
2868 2868
2869 2869 interactive = opts.get('interactive', False)
2870 2870 wctx = repo[None]
2871 2871 m = scmutil.match(wctx, pats, opts)
2872 2872
2873 2873 # we'll need this later
2874 2874 targetsubs = sorted(s for s in wctx.substate if m(s))
2875 2875
2876 2876 if not m.always():
2877 2877 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2878 2878 names[abs] = m.rel(abs), m.exact(abs)
2879 2879
2880 2880 # walk target manifest to fill `names`
2881 2881
2882 2882 def badfn(path, msg):
2883 2883 if path in names:
2884 2884 return
2885 2885 if path in ctx.substate:
2886 2886 return
2887 2887 path_ = path + '/'
2888 2888 for f in names:
2889 2889 if f.startswith(path_):
2890 2890 return
2891 2891 ui.warn("%s: %s\n" % (m.rel(path), msg))
2892 2892
2893 2893 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2894 2894 if abs not in names:
2895 2895 names[abs] = m.rel(abs), m.exact(abs)
2896 2896
2897 2897 # Find status of all file in `names`.
2898 2898 m = scmutil.matchfiles(repo, names)
2899 2899
2900 2900 changes = repo.status(node1=node, match=m,
2901 2901 unknown=True, ignored=True, clean=True)
2902 2902 else:
2903 2903 changes = repo.status(node1=node, match=m)
2904 2904 for kind in changes:
2905 2905 for abs in kind:
2906 2906 names[abs] = m.rel(abs), m.exact(abs)
2907 2907
2908 2908 m = scmutil.matchfiles(repo, names)
2909 2909
2910 2910 modified = set(changes.modified)
2911 2911 added = set(changes.added)
2912 2912 removed = set(changes.removed)
2913 2913 _deleted = set(changes.deleted)
2914 2914 unknown = set(changes.unknown)
2915 2915 unknown.update(changes.ignored)
2916 2916 clean = set(changes.clean)
2917 2917 modadded = set()
2918 2918
2919 2919 # split between files known in target manifest and the others
2920 2920 smf = set(mf)
2921 2921
2922 2922 # determine the exact nature of the deleted files
2923 2923 deladded = _deleted - smf
2924 2924 deleted = _deleted - deladded
2925 2925
2926 2926 # We need to account for the state of the file in the dirstate,
2927 2927 # even when we revert against something other than the parent. This will
2928 2928 # slightly alter the behavior of revert (doing a backup or not, deleting
2929 2929 # or just forgetting, etc.).
2930 2930 if parent == node:
2931 2931 dsmodified = modified
2932 2932 dsadded = added
2933 2933 dsremoved = removed
2934 2934 # store all local modifications, useful later for rename detection
2935 2935 localchanges = dsmodified | dsadded
2936 2936 modified, added, removed = set(), set(), set()
2937 2937 else:
2938 2938 changes = repo.status(node1=parent, match=m)
2939 2939 dsmodified = set(changes.modified)
2940 2940 dsadded = set(changes.added)
2941 2941 dsremoved = set(changes.removed)
2942 2942 # store all local modifications, useful later for rename detection
2943 2943 localchanges = dsmodified | dsadded
2944 2944
2945 2945 # only take removes between wc and target into account
2946 2946 clean |= dsremoved - removed
2947 2947 dsremoved &= removed
2948 2948 # distinguish between dirstate removes and others
2949 2949 removed -= dsremoved
2950 2950
2951 2951 modadded = added & dsmodified
2952 2952 added -= modadded
2953 2953
2954 2954 # tell newly modified files apart.
2955 2955 dsmodified &= modified
2956 2956 dsmodified |= modified & dsadded # dirstate added may need backup
2957 2957 modified -= dsmodified
2958 2958
2959 2959 # We need to wait for some post-processing to update this set
2960 2960 # before making the distinction. The dirstate will be used for
2961 2961 # that purpose.
2962 2962 dsadded = added
2963 2963
2964 2964 # in case of merge, files that are actually added can be reported as
2965 2965 # modified, we need to post process the result
2966 2966 if p2 != nullid:
2967 2967 if pmf is None:
2968 2968 # only need parent manifest in the merge case,
2969 2969 # so do not read by default
2970 2970 pmf = repo[parent].manifest()
2971 2971 mergeadd = dsmodified - set(pmf)
2972 2972 dsadded |= mergeadd
2973 2973 dsmodified -= mergeadd
2974 2974
2975 2975 # if f is a rename, update `names` to also revert the source
2976 2976 cwd = repo.getcwd()
2977 2977 for f in localchanges:
2978 2978 src = repo.dirstate.copied(f)
2979 2979 # XXX should we check for rename down to target node?
2980 2980 if src and src not in names and repo.dirstate[src] == 'r':
2981 2981 dsremoved.add(src)
2982 2982 names[src] = (repo.pathto(src, cwd), True)
2983 2983
2984 2984 # distinguish between file to forget and the other
2985 2985 added = set()
2986 2986 for abs in dsadded:
2987 2987 if repo.dirstate[abs] != 'a':
2988 2988 added.add(abs)
2989 2989 dsadded -= added
2990 2990
2991 2991 for abs in deladded:
2992 2992 if repo.dirstate[abs] == 'a':
2993 2993 dsadded.add(abs)
2994 2994 deladded -= dsadded
2995 2995
2996 2996 # For files marked as removed, we check if an unknown file is present at
2997 2997 # the same path. If such a file exists, it may need to be backed up.
2998 2998 # Making the distinction at this stage helps have simpler backup
2999 2999 # logic.
3000 3000 removunk = set()
3001 3001 for abs in removed:
3002 3002 target = repo.wjoin(abs)
3003 3003 if os.path.lexists(target):
3004 3004 removunk.add(abs)
3005 3005 removed -= removunk
3006 3006
3007 3007 dsremovunk = set()
3008 3008 for abs in dsremoved:
3009 3009 target = repo.wjoin(abs)
3010 3010 if os.path.lexists(target):
3011 3011 dsremovunk.add(abs)
3012 3012 dsremoved -= dsremovunk
3013 3013
3014 3014 # action to be actually performed by revert
3015 3015 # (<list of files>, <message>) tuple
3016 3016 actions = {'revert': ([], _('reverting %s\n')),
3017 3017 'add': ([], _('adding %s\n')),
3018 3018 'remove': ([], _('removing %s\n')),
3019 3019 'drop': ([], _('removing %s\n')),
3020 3020 'forget': ([], _('forgetting %s\n')),
3021 3021 'undelete': ([], _('undeleting %s\n')),
3022 3022 'noop': (None, _('no changes needed to %s\n')),
3023 3023 'unknown': (None, _('file not managed: %s\n')),
3024 3024 }
3025 3025
3026 3026 # "constant" that convey the backup strategy.
3027 3027 # All set to `discard` if `no-backup` is set, to avoid checking
3028 3028 # no_backup lower in the code.
3029 3029 # These values are ordered for comparison purposes
3030 3030 backup = 2 # unconditionally do backup
3031 3031 check = 1 # check if the existing file differs from target
3032 3032 discard = 0 # never do backup
3033 3033 if opts.get('no_backup'):
3034 3034 backup = check = discard
3035 3035
3036 3036 backupanddel = actions['remove']
3037 3037 if not opts.get('no_backup'):
3038 3038 backupanddel = actions['drop']
3039 3039
3040 3040 disptable = (
3041 3041 # dispatch table:
3042 3042 # file state
3043 3043 # action
3044 3044 # make backup
3045 3045
3046 3046 ## Sets that result in files changed on disk
3047 3047 # Modified compared to target, no local change
3048 3048 (modified, actions['revert'], discard),
3049 3049 # Modified compared to target, but local file is deleted
3050 3050 (deleted, actions['revert'], discard),
3051 3051 # Modified compared to target, local change
3052 3052 (dsmodified, actions['revert'], backup),
3053 3053 # Added since target
3054 3054 (added, actions['remove'], discard),
3055 3055 # Added in working directory
3056 3056 (dsadded, actions['forget'], discard),
3057 3057 # Added since target, have local modification
3058 3058 (modadded, backupanddel, backup),
3059 3059 # Added since target but file is missing in working directory
3060 3060 (deladded, actions['drop'], discard),
3061 3061 # Removed since target, before working copy parent
3062 3062 (removed, actions['add'], discard),
3063 3063 # Same as `removed` but an unknown file exists at the same path
3064 3064 (removunk, actions['add'], check),
3065 3065 # Removed since target, marked as such in working copy parent
3066 3066 (dsremoved, actions['undelete'], discard),
3067 3067 # Same as `dsremoved` but an unknown file exists at the same path
3068 3068 (dsremovunk, actions['undelete'], check),
3069 3069 ## the following sets do not result in any file changes
3070 3070 # File with no modification
3071 3071 (clean, actions['noop'], discard),
3072 3072 # Existing file, not tracked anywhere
3073 3073 (unknown, actions['unknown'], discard),
3074 3074 )
3075 3075
3076 3076 for abs, (rel, exact) in sorted(names.items()):
3077 3077 # target file to be touched on disk (relative to cwd)
3078 3078 target = repo.wjoin(abs)
3079 3079 # search the entry in the dispatch table.
3080 3080 # if the file is in any of these sets, it was touched in the working
3081 3081 # directory parent and we are sure it needs to be reverted.
3082 3082 for table, (xlist, msg), dobackup in disptable:
3083 3083 if abs not in table:
3084 3084 continue
3085 3085 if xlist is not None:
3086 3086 xlist.append(abs)
3087 3087 if dobackup and (backup <= dobackup
3088 3088 or wctx[abs].cmp(ctx[abs])):
3089 3089 bakname = scmutil.origpath(ui, repo, rel)
3090 3090 ui.note(_('saving current version of %s as %s\n') %
3091 3091 (rel, bakname))
3092 3092 if not opts.get('dry_run'):
3093 3093 if interactive:
3094 3094 util.copyfile(target, bakname)
3095 3095 else:
3096 3096 util.rename(target, bakname)
3097 3097 if ui.verbose or not exact:
3098 3098 if not isinstance(msg, basestring):
3099 3099 msg = msg(abs)
3100 3100 ui.status(msg % rel)
3101 3101 elif exact:
3102 3102 ui.warn(msg % rel)
3103 3103 break
3104 3104
3105 3105 if not opts.get('dry_run'):
3106 3106 needdata = ('revert', 'add', 'undelete')
3107 3107 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3108 3108 _performrevert(repo, parents, ctx, actions, interactive)
3109 3109
3110 3110 if targetsubs:
3111 3111 # Revert the subrepos on the revert list
3112 3112 for sub in targetsubs:
3113 3113 try:
3114 3114 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3115 3115 except KeyError:
3116 3116 raise error.Abort("subrepository '%s' does not exist in %s!"
3117 3117 % (sub, short(ctx.node())))
3118 3118
3119 3119 def _revertprefetch(repo, ctx, *files):
3120 3120 """Let extension changing the storage layer prefetch content"""
3121 3121 pass
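# Illustrative sketch, not part of cmdutil.py: an extension that keeps file
# contents in remote storage could wrap _revertprefetch() to fetch everything
# revert will touch in a single batch. extensions.wrapfunction() is the
# standard wrapping helper; fetchfiles() is a hypothetical helper of such an
# extension.
#
#   def _prefetch(orig, repo, ctx, *files):
#       fetchfiles(repo, ctx.node(), [f for flist in files for f in flist])
#       return orig(repo, ctx, *files)
#   extensions.wrapfunction(cmdutil, '_revertprefetch', _prefetch)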
3122 3122
3123 3123 def _performrevert(repo, parents, ctx, actions, interactive=False):
3124 3124 """function that actually perform all the actions computed for revert
3125 3125
3126 3126 This is an independent function to let extension to plug in and react to
3127 3127 the imminent revert.
3128 3128
3129 3129 Make sure you have the working directory locked when calling this function.
3130 3130 """
3131 3131 parent, p2 = parents
3132 3132 node = ctx.node()
3133 3133 excluded_files = []
3134 3134 matcher_opts = {"exclude": excluded_files}
3135 3135
3136 3136 def checkout(f):
3137 3137 fc = ctx[f]
3138 3138 repo.wwrite(f, fc.data(), fc.flags())
3139 3139
3140 3140 audit_path = pathutil.pathauditor(repo.root)
3141 3141 for f in actions['forget'][0]:
3142 3142 if interactive:
3143 3143 choice = \
3144 3144 repo.ui.promptchoice(
3145 3145 _("forget added file %s (yn)?$$ &Yes $$ &No")
3146 3146 % f)
3147 3147 if choice == 0:
3148 3148 repo.dirstate.drop(f)
3149 3149 else:
3150 3150 excluded_files.append(repo.wjoin(f))
3151 3151 else:
3152 3152 repo.dirstate.drop(f)
3153 3153 for f in actions['remove'][0]:
3154 3154 audit_path(f)
3155 3155 try:
3156 3156 util.unlinkpath(repo.wjoin(f))
3157 3157 except OSError:
3158 3158 pass
3159 3159 repo.dirstate.remove(f)
3160 3160 for f in actions['drop'][0]:
3161 3161 audit_path(f)
3162 3162 repo.dirstate.remove(f)
3163 3163
3164 3164 normal = None
3165 3165 if node == parent:
3166 3166 # We're reverting to our parent. If possible, we'd like status
3167 3167 # to report the file as clean. We have to use normallookup for
3168 3168 # merges to avoid losing information about merged/dirty files.
3169 3169 if p2 != nullid:
3170 3170 normal = repo.dirstate.normallookup
3171 3171 else:
3172 3172 normal = repo.dirstate.normal
3173 3173
3174 3174 newlyaddedandmodifiedfiles = set()
3175 3175 if interactive:
3176 3176 # Prompt the user for changes to revert
3177 3177 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3178 3178 m = scmutil.match(ctx, torevert, matcher_opts)
3179 3179 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3180 3180 diffopts.nodates = True
3181 3181 diffopts.git = True
3182 3182 reversehunks = repo.ui.configbool('experimental',
3183 3183 'revertalternateinteractivemode',
3184 3184 True)
3185 3185 if reversehunks:
3186 3186 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3187 3187 else:
3188 3188 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3189 3189 originalchunks = patch.parsepatch(diff)
3190 3190
3191 3191 try:
3192 3192
3193 3193 chunks, opts = recordfilter(repo.ui, originalchunks)
3194 3194 if reversehunks:
3195 3195 chunks = patch.reversehunks(chunks)
3196 3196
3197 3197 except patch.PatchError as err:
3198 3198 raise error.Abort(_('error parsing patch: %s') % err)
3199 3199
3200 3200 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3201 3201 # Apply changes
3202 3202 fp = cStringIO.StringIO()
3203 3203 for c in chunks:
3204 3204 c.write(fp)
3205 3205 dopatch = fp.tell()
3206 3206 fp.seek(0)
3207 3207 if dopatch:
3208 3208 try:
3209 3209 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3210 3210 except patch.PatchError as err:
3211 3211 raise error.Abort(str(err))
3212 3212 del fp
3213 3213 else:
3214 3214 for f in actions['revert'][0]:
3215 3215 checkout(f)
3216 3216 if normal:
3217 3217 normal(f)
3218 3218
3219 3219 for f in actions['add'][0]:
3220 3220 # Don't checkout modified files, they are already created by the diff
3221 3221 if f not in newlyaddedandmodifiedfiles:
3222 3222 checkout(f)
3223 3223 repo.dirstate.add(f)
3224 3224
3225 3225 normal = repo.dirstate.normallookup
3226 3226 if node == parent and p2 == nullid:
3227 3227 normal = repo.dirstate.normal
3228 3228 for f in actions['undelete'][0]:
3229 3229 checkout(f)
3230 3230 normal(f)
3231 3231
3232 3232 copied = copies.pathcopies(repo[parent], ctx)
3233 3233
3234 3234 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3235 3235 if f in copied:
3236 3236 repo.dirstate.copy(copied[f], f)
3237 3237
3238 3238 def command(table):
3239 3239 """Returns a function object to be used as a decorator for making commands.
3240 3240
3241 3241 This function receives a command table as its argument. The table should
3242 3242 be a dict.
3243 3243
3244 3244 The returned function can be used as a decorator for adding commands
3245 3245 to that command table. This function accepts multiple arguments to define
3246 3246 a command.
3247 3247
3248 3248 The first argument is the command name.
3249 3249
3250 3250 The options argument is an iterable of tuples defining command arguments.
3251 3251 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3252 3252
3253 3253 The synopsis argument defines a short, one line summary of how to use the
3254 3254 command. This shows up in the help output.
3255 3255
3256 3256 The norepo argument defines whether the command does not require a
3257 3257 local repository. Most commands operate against a repository, thus the
3258 3258 default is False.
3259 3259
3260 3260 The optionalrepo argument defines whether the command optionally requires
3261 3261 a local repository.
3262 3262
3263 3263 The inferrepo argument defines whether to try to find a repository from the
3264 3264 command line arguments. If True, arguments will be examined for potential
3265 3265 repository locations. See ``findrepo()``. If a repository is found, it
3266 3266 will be used.
3267 3267 """
3268 3268 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3269 3269 inferrepo=False):
3270 3270 def decorator(func):
3271 3271 if synopsis:
3272 3272 table[name] = func, list(options), synopsis
3273 3273 else:
3274 3274 table[name] = func, list(options)
3275 3275
3276 3276 if norepo:
3277 3277 # Avoid import cycle.
3278 3278 import commands
3279 3279 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3280 3280
3281 3281 if optionalrepo:
3282 3282 import commands
3283 3283 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3284 3284
3285 3285 if inferrepo:
3286 3286 import commands
3287 3287 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3288 3288
3289 3289 return func
3290 3290 return decorator
3291 3291
3292 3292 return cmd
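# Illustrative sketch, not part of cmdutil.py: typical use of the decorator
# returned by command(). The table, command name and option are made up for
# the example.
#
#   cmdtable = {}
#   command = cmdutil.command(cmdtable)
#
#   @command('hello',
#            [('g', 'greeting', 'Hello', _('greeting to use'))],
#            _('hg hello [-g TEXT]'))
#   def hello(ui, repo, **opts):
#       ui.write('%s from %s\n' % (opts['greeting'], repo.root))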
3293 3293
3294 3294 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3295 3295 # commands.outgoing. "missing" is the "missing" attribute of the result
3296 3296 # of "findcommonoutgoing()"
3297 3297 outgoinghooks = util.hooks()
3298 3298
3299 3299 # a list of (ui, repo) functions called by commands.summary
3300 3300 summaryhooks = util.hooks()
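# Illustrative sketch, not part of cmdutil.py: an extension can append its
# own line to `hg summary` by registering a (ui, repo) function here. The
# 'myext' source name and the message are made up.
#
#   def summaryhook(ui, repo):
#       ui.status(_('myext: nothing in progress\n'))
#   cmdutil.summaryhooks.add('myext', summaryhook)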
3301 3301
3302 3302 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3303 3303 #
3304 3304 # functions should return tuple of booleans below, if 'changes' is None:
3305 3305 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3306 3306 #
3307 3307 # otherwise, 'changes' is a tuple of tuples below:
3308 3308 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3309 3309 # - (desturl, destbranch, destpeer, outgoing)
3310 3310 summaryremotehooks = util.hooks()
3311 3311
3312 3312 # A list of state files kept by multistep operations like graft.
3313 3313 # Since graft cannot be aborted, it is considered 'clearable' by update.
3314 3314 # note: bisect is intentionally excluded
3315 3315 # (state file, clearable, allowcommit, error, hint)
3316 3316 unfinishedstates = [
3317 3317 ('graftstate', True, False, _('graft in progress'),
3318 3318 _("use 'hg graft --continue' or 'hg update' to abort")),
3319 3319 ('updatestate', True, False, _('last update was interrupted'),
3320 3320 _("use 'hg update' to get a consistent checkout"))
3321 3321 ]
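# Illustrative sketch, not part of cmdutil.py: an extension that runs its own
# multistep operation could register its state file so checkunfinished()
# below refuses to start new work while that operation is in progress. The
# 'mystate' file name and hint text are made up.
#
#   cmdutil.unfinishedstates.append(
#       ('mystate', False, False, _('my operation in progress'),
#        _("use 'hg myext --continue' or 'hg myext --abort'")))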
3322 3322
3323 3323 def checkunfinished(repo, commit=False):
3324 3324 '''Look for an unfinished multistep operation, like graft, and abort
3325 3325 if found. It's probably good to check this right before
3326 3326 bailifchanged().
3327 3327 '''
3328 3328 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3329 3329 if commit and allowcommit:
3330 3330 continue
3331 3331 if repo.vfs.exists(f):
3332 3332 raise error.Abort(msg, hint=hint)
3333 3333
3334 3334 def clearunfinished(repo):
3335 3335 '''Check for unfinished operations (as above), and clear the ones
3336 3336 that are clearable.
3337 3337 '''
3338 3338 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3339 3339 if not clearable and repo.vfs.exists(f):
3340 3340 raise error.Abort(msg, hint=hint)
3341 3341 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3342 3342 if clearable and repo.vfs.exists(f):
3343 3343 util.unlink(repo.join(f))
3344 3344
3345 3345 afterresolvedstates = [
3346 3346 ('graftstate',
3347 3347 _('hg graft --continue')),
3348 3348 ]
3349 3349
3350 def checkafterresolved(repo):
3351 contmsg = _("continue: %s\n")
3350 def howtocontinue(repo):
3351 '''Check for an unfinished operation and return the command to finish
3352 it.
3353
3354 afterresolvedstates tuples define a .hg/{file} and the corresponding
3355 command needed to finish it.
3356
3357 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3358 a boolean.
3359 '''
3360 contmsg = _("continue: %s")
3352 3361 for f, msg in afterresolvedstates:
3353 3362 if repo.vfs.exists(f):
3354 repo.ui.warn(contmsg % msg)
3355 return
3356 repo.ui.note(contmsg % _("hg commit"))
3363 return contmsg % msg, True
3364 workingctx = repo[None]
3365 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3366 for s in workingctx.substate)
3367 if dirty:
3368 return contmsg % _("hg commit"), False
3369 return None, None
3370
3371 def checkafterresolved(repo):
3372 '''Inform the user about the next action after completing hg resolve
3373
3374 If there is a matching afterresolvedstates entry, the suggestion is
3375 reported with repo.ui.warn.
3376
3377 Otherwise, it is reported with repo.ui.note.
3378 '''
3379 msg, warning = howtocontinue(repo)
3380 if msg is not None:
3381 if warning:
3382 repo.ui.warn("%s\n" % msg)
3383 else:
3384 repo.ui.note("%s\n" % msg)
3385
3386 def wrongtooltocontinue(repo, task):
3387 '''Raise an abort suggesting how to properly continue if there is an
3388 active task.
3389
3390 Uses howtocontinue() to find the active task.
3391
3392 If there is no task (only the 'hg commit' suggestion, reported via
3393 repo.ui.note), no hint is offered.
3394 '''
3395 after = howtocontinue(repo)
3396 hint = None
3397 if after[1]:
3398 hint = after[0]
3399 raise error.Abort(_('no %s in progress') % task, hint=hint)
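# Illustrative sketch, not part of this change: how a --continue handler can
# use the helper above to complain when its own state file is absent. The
# surrounding command is made up; graftstate is the file 'hg graft' keeps.
#
#   if opts.get('continue'):
#       if not repo.vfs.exists('graftstate'):
#           cmdutil.wrongtooltocontinue(repo, _('graft'))
#       # otherwise resume the interrupted graft here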
3357 3400
3358 3401 class dirstateguard(object):
3359 3402 '''Restore dirstate on unexpected failure.
3360 3403
3361 3404 At construction, this class does:
3362 3405
3363 3406 - write current ``repo.dirstate`` out, and
3364 3407 - save ``.hg/dirstate`` into the backup file
3365 3408
3366 3409 This restores ``.hg/dirstate`` from the backup file, if ``release()``
3367 3410 is invoked before ``close()``.
3368 3411
3369 3412 Invoking ``close()`` before ``release()`` just removes the backup file.
3370 3413 '''
3371 3414
3372 3415 def __init__(self, repo, name):
3373 3416 self._repo = repo
3374 3417 self._suffix = '.backup.%s.%d' % (name, id(self))
3375 3418 repo.dirstate._savebackup(repo.currenttransaction(), self._suffix)
3376 3419 self._active = True
3377 3420 self._closed = False
3378 3421
3379 3422 def __del__(self):
3380 3423 if self._active: # still active
3381 3424 # this may occur, even if this class is used correctly:
3382 3425 # for example, releasing other resources like transaction
3383 3426 # may raise exception before ``dirstateguard.release`` in
3384 3427 # ``release(tr, ....)``.
3385 3428 self._abort()
3386 3429
3387 3430 def close(self):
3388 3431 if not self._active: # already inactivated
3389 3432 msg = (_("can't close already inactivated backup: dirstate%s")
3390 3433 % self._suffix)
3391 3434 raise error.Abort(msg)
3392 3435
3393 3436 self._repo.dirstate._clearbackup(self._repo.currenttransaction(),
3394 3437 self._suffix)
3395 3438 self._active = False
3396 3439 self._closed = True
3397 3440
3398 3441 def _abort(self):
3399 3442 self._repo.dirstate._restorebackup(self._repo.currenttransaction(),
3400 3443 self._suffix)
3401 3444 self._active = False
3402 3445
3403 3446 def release(self):
3404 3447 if not self._closed:
3405 3448 if not self._active: # already inactivated
3406 3449 msg = (_("can't release already inactivated backup:"
3407 3450 " dirstate%s")
3408 3451 % self._suffix)
3409 3452 raise error.Abort(msg)
3410 3453 self._abort()
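# Illustrative sketch, not part of cmdutil.py: the intended calling pattern
# for dirstateguard, with 'mycommand' as a made-up operation name.
#
#   dsguard = cmdutil.dirstateguard(repo, 'mycommand')
#   try:
#       # ... mutate the dirstate ...
#       dsguard.close()      # success: just drop the backup
#   finally:
#       dsguard.release()    # if close() was not reached, restore .hg/dirstate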