cmdutil: stop tryimportone from using dirstateguard (BC)...
FUJIWARA Katsunori
r26579:dc2b8c00 default
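The hunks below drop the function-local dirstateguard from tryimportone: the removed lines are the 'dsguard = None' initialization, the 'dsguard = dirstateguard(repo, 'tryimportone')' setup in the update branch, the 'dsguard.close()' after the commit is created, and the 'lockmod.release(dsguard)' in the final cleanup, leaving only the temporary patch file removal in the finally block. Presumably this is the backwards-compatibility (BC) impact noted in the title: if the import fails partway, the dirstate is no longer rolled back inside this function but is left to the caller. For readers unfamiliar with the guard pattern being removed, here is a minimal, self-contained illustration of its save/close/release semantics (the _guard class and the dict standing in for the dirstate are hypothetical, not Mercurial's actual dirstateguard):

    class _guard(object):
        """Snapshot some state; roll it back on release() unless close() was called."""
        def __init__(self, state):
            self._state = state
            self._saved = dict(state)   # take a snapshot up front
            self._closed = False
        def close(self):
            # the changes are wanted; release() becomes a no-op
            self._closed = True
        def release(self):
            if not self._closed:
                # e.g. an exception was raised before close(): restore the snapshot
                self._state.clear()
                self._state.update(self._saved)

    state = {'branch': 'default'}
    g = _guard(state)
    state['branch'] = 'stable'   # mutate, as tryimportone mutates the dirstate
    g.release()                  # released without close(): rolled back
    assert state == {'branch': 'default'}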
@@ -1,3429 +1,3425 @@
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO, shutil
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import formatter
18 18 import crecord as crecordmod
19 19 import lock as lockmod
20 20
21 21 def ishunk(x):
22 22 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
23 23 return isinstance(x, hunkclasses)
24 24
25 25 def newandmodified(chunks, originalchunks):
26 26 newlyaddedandmodifiedfiles = set()
27 27 for chunk in chunks:
28 28 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
29 29 originalchunks:
30 30 newlyaddedandmodifiedfiles.add(chunk.header.filename())
31 31 return newlyaddedandmodifiedfiles
32 32
33 33 def parsealiases(cmd):
34 34 return cmd.lstrip("^").split("|")
35 35
36 36 def setupwrapcolorwrite(ui):
37 37 # wrap ui.write so diff output can be labeled/colorized
38 38 def wrapwrite(orig, *args, **kw):
39 39 label = kw.pop('label', '')
40 40 for chunk, l in patch.difflabel(lambda: args):
41 41 orig(chunk, label=label + l)
42 42
43 43 oldwrite = ui.write
44 44 def wrap(*args, **kwargs):
45 45 return wrapwrite(oldwrite, *args, **kwargs)
46 46 setattr(ui, 'write', wrap)
47 47 return oldwrite
48 48
49 49 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
50 50 if usecurses:
51 51 if testfile:
52 52 recordfn = crecordmod.testdecorator(testfile,
53 53 crecordmod.testchunkselector)
54 54 else:
55 55 recordfn = crecordmod.chunkselector
56 56
57 57 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
58 58
59 59 else:
60 60 return patch.filterpatch(ui, originalhunks, operation)
61 61
62 62 def recordfilter(ui, originalhunks, operation=None):
63 63 """ Prompts the user to filter the originalhunks and return a list of
64 64 selected hunks.
65 65 *operation* is used for ui purposes to indicate the user
66 66 what kind of filtering they are doing: reverting, commiting, shelving, etc.
67 67 *operation* has to be a translated string.
68 68 """
69 69 usecurses = ui.configbool('experimental', 'crecord', False)
70 70 testfile = ui.config('experimental', 'crecordtest', None)
71 71 oldwrite = setupwrapcolorwrite(ui)
72 72 try:
73 73 newchunks = filterchunks(ui, originalhunks, usecurses, testfile,
74 74 operation)
75 75 finally:
76 76 ui.write = oldwrite
77 77 return newchunks
78 78
79 79 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
80 80 filterfn, *pats, **opts):
81 81 import merge as mergemod
82 82
83 83 if not ui.interactive():
84 84 if cmdsuggest:
85 85 msg = _('running non-interactively, use %s instead') % cmdsuggest
86 86 else:
87 87 msg = _('running non-interactively')
88 88 raise util.Abort(msg)
89 89
90 90 # make sure username is set before going interactive
91 91 if not opts.get('user'):
92 92 ui.username() # raise exception, username not provided
93 93
94 94 def recordfunc(ui, repo, message, match, opts):
95 95 """This is generic record driver.
96 96
97 97 Its job is to interactively filter local changes, and
98 98 accordingly prepare working directory into a state in which the
99 99 job can be delegated to a non-interactive commit command such as
100 100 'commit' or 'qrefresh'.
101 101
102 102 After the actual job is done by the non-interactive command, the
103 103 working directory is restored to its original state.
104 104
105 105 In the end we'll record interesting changes, and everything else
106 106 will be left in place, so the user can continue working.
107 107 """
108 108
109 109 checkunfinished(repo, commit=True)
110 110 merge = len(repo[None].parents()) > 1
111 111 if merge:
112 112 raise util.Abort(_('cannot partially commit a merge '
113 113 '(use "hg commit" instead)'))
114 114
115 115 status = repo.status(match=match)
116 116 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
117 117 diffopts.nodates = True
118 118 diffopts.git = True
119 119 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
120 120 originalchunks = patch.parsepatch(originaldiff)
121 121
122 122 # 1. filter patch, so we have an intending-to-apply subset of it
123 123 try:
124 124 chunks = filterfn(ui, originalchunks)
125 125 except patch.PatchError as err:
126 126 raise util.Abort(_('error parsing patch: %s') % err)
127 127
128 128 # We need to keep a backup of files that have been newly added and
129 129 # modified during the recording process because there is a previous
130 130 # version without the edit in the workdir
131 131 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
132 132 contenders = set()
133 133 for h in chunks:
134 134 try:
135 135 contenders.update(set(h.files()))
136 136 except AttributeError:
137 137 pass
138 138
139 139 changed = status.modified + status.added + status.removed
140 140 newfiles = [f for f in changed if f in contenders]
141 141 if not newfiles:
142 142 ui.status(_('no changes to record\n'))
143 143 return 0
144 144
145 145 modified = set(status.modified)
146 146
147 147 # 2. backup changed files, so we can restore them in the end
148 148
149 149 if backupall:
150 150 tobackup = changed
151 151 else:
152 152 tobackup = [f for f in newfiles if f in modified or f in \
153 153 newlyaddedandmodifiedfiles]
154 154 backups = {}
155 155 if tobackup:
156 156 backupdir = repo.join('record-backups')
157 157 try:
158 158 os.mkdir(backupdir)
159 159 except OSError as err:
160 160 if err.errno != errno.EEXIST:
161 161 raise
162 162 try:
163 163 # backup continues
164 164 for f in tobackup:
165 165 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
166 166 dir=backupdir)
167 167 os.close(fd)
168 168 ui.debug('backup %r as %r\n' % (f, tmpname))
169 169 util.copyfile(repo.wjoin(f), tmpname)
170 170 shutil.copystat(repo.wjoin(f), tmpname)
171 171 backups[f] = tmpname
172 172
173 173 fp = cStringIO.StringIO()
174 174 for c in chunks:
175 175 fname = c.filename()
176 176 if fname in backups:
177 177 c.write(fp)
178 178 dopatch = fp.tell()
179 179 fp.seek(0)
180 180
181 181 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
182 182 # 3a. apply filtered patch to clean repo (clean)
183 183 if backups:
184 184 # Equivalent to hg.revert
185 185 choices = lambda key: key in backups
186 186 mergemod.update(repo, repo.dirstate.p1(),
187 187 False, True, choices)
188 188
189 189 # 3b. (apply)
190 190 if dopatch:
191 191 try:
192 192 ui.debug('applying patch\n')
193 193 ui.debug(fp.getvalue())
194 194 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
195 195 except patch.PatchError as err:
196 196 raise util.Abort(str(err))
197 197 del fp
198 198
199 199 # 4. We prepared the working directory according to the filtered
200 200 # patch. Now is the time to delegate the job to
201 201 # commit/qrefresh or the like!
202 202
203 203 # Make all of the pathnames absolute.
204 204 newfiles = [repo.wjoin(nf) for nf in newfiles]
205 205 return commitfunc(ui, repo, *newfiles, **opts)
206 206 finally:
207 207 # 5. finally restore backed-up files
208 208 try:
209 209 dirstate = repo.dirstate
210 210 for realname, tmpname in backups.iteritems():
211 211 ui.debug('restoring %r to %r\n' % (tmpname, realname))
212 212
213 213 if dirstate[realname] == 'n':
214 214 # without normallookup, restoring timestamp
215 215 # may cause partially committed files
216 216 # to be treated as unmodified
217 217 dirstate.normallookup(realname)
218 218
219 219 util.copyfile(tmpname, repo.wjoin(realname))
220 220 # Our calls to copystat() here and above are a
221 221 # hack to trick any editors that have f open into
222 222 # thinking that we haven't modified it.
223 223 #
224 224 # Also note that this is racy, as an editor could
225 225 # notice the file's mtime before we've finished
226 226 # writing it.
227 227 shutil.copystat(tmpname, repo.wjoin(realname))
228 228 os.unlink(tmpname)
229 229 if tobackup:
230 230 os.rmdir(backupdir)
231 231 except OSError:
232 232 pass
233 233
234 234 def recordinwlock(ui, repo, message, match, opts):
235 235 wlock = repo.wlock()
236 236 try:
237 237 return recordfunc(ui, repo, message, match, opts)
238 238 finally:
239 239 wlock.release()
240 240
241 241 return commit(ui, repo, recordinwlock, pats, opts)
242 242
243 243 def findpossible(cmd, table, strict=False):
244 244 """
245 245 Return cmd -> (aliases, command table entry)
246 246 for each matching command.
247 247 Return debug commands (or their aliases) only if no normal command matches.
248 248 """
249 249 choice = {}
250 250 debugchoice = {}
251 251
252 252 if cmd in table:
253 253 # short-circuit exact matches, "log" alias beats "^log|history"
254 254 keys = [cmd]
255 255 else:
256 256 keys = table.keys()
257 257
258 258 allcmds = []
259 259 for e in keys:
260 260 aliases = parsealiases(e)
261 261 allcmds.extend(aliases)
262 262 found = None
263 263 if cmd in aliases:
264 264 found = cmd
265 265 elif not strict:
266 266 for a in aliases:
267 267 if a.startswith(cmd):
268 268 found = a
269 269 break
270 270 if found is not None:
271 271 if aliases[0].startswith("debug") or found.startswith("debug"):
272 272 debugchoice[found] = (aliases, table[e])
273 273 else:
274 274 choice[found] = (aliases, table[e])
275 275
276 276 if not choice and debugchoice:
277 277 choice = debugchoice
278 278
279 279 return choice, allcmds
280 280
281 281 def findcmd(cmd, table, strict=True):
282 282 """Return (aliases, command table entry) for command string."""
283 283 choice, allcmds = findpossible(cmd, table, strict)
284 284
285 285 if cmd in choice:
286 286 return choice[cmd]
287 287
288 288 if len(choice) > 1:
289 289 clist = choice.keys()
290 290 clist.sort()
291 291 raise error.AmbiguousCommand(cmd, clist)
292 292
293 293 if choice:
294 294 return choice.values()[0]
295 295
296 296 raise error.UnknownCommand(cmd, allcmds)
297 297
298 298 def findrepo(p):
299 299 while not os.path.isdir(os.path.join(p, ".hg")):
300 300 oldp, p = p, os.path.dirname(p)
301 301 if p == oldp:
302 302 return None
303 303
304 304 return p
305 305
306 306 def bailifchanged(repo, merge=True):
307 307 if merge and repo.dirstate.p2() != nullid:
308 308 raise util.Abort(_('outstanding uncommitted merge'))
309 309 modified, added, removed, deleted = repo.status()[:4]
310 310 if modified or added or removed or deleted:
311 311 raise util.Abort(_('uncommitted changes'))
312 312 ctx = repo[None]
313 313 for s in sorted(ctx.substate):
314 314 ctx.sub(s).bailifchanged()
315 315
316 316 def logmessage(ui, opts):
317 317 """ get the log message according to -m and -l option """
318 318 message = opts.get('message')
319 319 logfile = opts.get('logfile')
320 320
321 321 if message and logfile:
322 322 raise util.Abort(_('options --message and --logfile are mutually '
323 323 'exclusive'))
324 324 if not message and logfile:
325 325 try:
326 326 if logfile == '-':
327 327 message = ui.fin.read()
328 328 else:
329 329 message = '\n'.join(util.readfile(logfile).splitlines())
330 330 except IOError as inst:
331 331 raise util.Abort(_("can't read commit message '%s': %s") %
332 332 (logfile, inst.strerror))
333 333 return message
334 334
335 335 def mergeeditform(ctxorbool, baseformname):
336 336 """return appropriate editform name (referencing a committemplate)
337 337
338 338 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
339 339 a merge is being committed.
340 340
341 341 This returns baseformname with '.merge' appended if it is a merge,
342 342 otherwise '.normal' is appended.
343 343 """
344 344 if isinstance(ctxorbool, bool):
345 345 if ctxorbool:
346 346 return baseformname + ".merge"
347 347 elif 1 < len(ctxorbool.parents()):
348 348 return baseformname + ".merge"
349 349
350 350 return baseformname + ".normal"
351 351
352 352 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
353 353 editform='', **opts):
354 354 """get appropriate commit message editor according to '--edit' option
355 355
356 356 'finishdesc' is a function to be called with the edited commit message
357 357 (= 'description' of the new changeset) just after editing, but
358 358 before checking emptiness. It should return the actual text to be
359 359 stored into history. This allows the description to be changed
360 360 before storing.
361 361
362 362 'extramsg' is an extra message to be shown in the editor instead of
363 363 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and
364 364 EOL are added automatically.
365 365
366 366 'editform' is a dot-separated list of names, to distinguish
367 367 the purpose of commit text editing.
368 368
369 369 'getcommiteditor' returns 'commitforceeditor' regardless of
370 370 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
371 371 they are specific to usage in MQ.
372 372 """
373 373 if edit or finishdesc or extramsg:
374 374 return lambda r, c, s: commitforceeditor(r, c, s,
375 375 finishdesc=finishdesc,
376 376 extramsg=extramsg,
377 377 editform=editform)
378 378 elif editform:
379 379 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
380 380 else:
381 381 return commiteditor
382 382
383 383 def loglimit(opts):
384 384 """get the log limit according to option -l/--limit"""
385 385 limit = opts.get('limit')
386 386 if limit:
387 387 try:
388 388 limit = int(limit)
389 389 except ValueError:
390 390 raise util.Abort(_('limit must be a positive integer'))
391 391 if limit <= 0:
392 392 raise util.Abort(_('limit must be positive'))
393 393 else:
394 394 limit = None
395 395 return limit
396 396
397 397 def makefilename(repo, pat, node, desc=None,
398 398 total=None, seqno=None, revwidth=None, pathname=None):
399 399 node_expander = {
400 400 'H': lambda: hex(node),
401 401 'R': lambda: str(repo.changelog.rev(node)),
402 402 'h': lambda: short(node),
403 403 'm': lambda: re.sub('[^\w]', '_', str(desc))
404 404 }
405 405 expander = {
406 406 '%': lambda: '%',
407 407 'b': lambda: os.path.basename(repo.root),
408 408 }
409 409
410 410 try:
411 411 if node:
412 412 expander.update(node_expander)
413 413 if node:
414 414 expander['r'] = (lambda:
415 415 str(repo.changelog.rev(node)).zfill(revwidth or 0))
416 416 if total is not None:
417 417 expander['N'] = lambda: str(total)
418 418 if seqno is not None:
419 419 expander['n'] = lambda: str(seqno)
420 420 if total is not None and seqno is not None:
421 421 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
422 422 if pathname is not None:
423 423 expander['s'] = lambda: os.path.basename(pathname)
424 424 expander['d'] = lambda: os.path.dirname(pathname) or '.'
425 425 expander['p'] = lambda: pathname
426 426
427 427 newname = []
428 428 patlen = len(pat)
429 429 i = 0
430 430 while i < patlen:
431 431 c = pat[i]
432 432 if c == '%':
433 433 i += 1
434 434 c = pat[i]
435 435 c = expander[c]()
436 436 newname.append(c)
437 437 i += 1
438 438 return ''.join(newname)
439 439 except KeyError as inst:
440 440 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
441 441 inst.args[0])
442 442
443 443 def makefileobj(repo, pat, node=None, desc=None, total=None,
444 444 seqno=None, revwidth=None, mode='wb', modemap=None,
445 445 pathname=None):
446 446
447 447 writable = mode not in ('r', 'rb')
448 448
449 449 if not pat or pat == '-':
450 450 if writable:
451 451 fp = repo.ui.fout
452 452 else:
453 453 fp = repo.ui.fin
454 454 if util.safehasattr(fp, 'fileno'):
455 455 return os.fdopen(os.dup(fp.fileno()), mode)
456 456 else:
457 457 # if this fp can't be duped properly, return
458 458 # a dummy object that can be closed
459 459 class wrappedfileobj(object):
460 460 noop = lambda x: None
461 461 def __init__(self, f):
462 462 self.f = f
463 463 def __getattr__(self, attr):
464 464 if attr == 'close':
465 465 return self.noop
466 466 else:
467 467 return getattr(self.f, attr)
468 468
469 469 return wrappedfileobj(fp)
470 470 if util.safehasattr(pat, 'write') and writable:
471 471 return pat
472 472 if util.safehasattr(pat, 'read') and 'r' in mode:
473 473 return pat
474 474 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
475 475 if modemap is not None:
476 476 mode = modemap.get(fn, mode)
477 477 if mode == 'wb':
478 478 modemap[fn] = 'ab'
479 479 return open(fn, mode)
480 480
481 481 def openrevlog(repo, cmd, file_, opts):
482 482 """opens the changelog, manifest, a filelog or a given revlog"""
483 483 cl = opts['changelog']
484 484 mf = opts['manifest']
485 485 dir = opts['dir']
486 486 msg = None
487 487 if cl and mf:
488 488 msg = _('cannot specify --changelog and --manifest at the same time')
489 489 elif cl and dir:
490 490 msg = _('cannot specify --changelog and --dir at the same time')
491 491 elif cl or mf:
492 492 if file_:
493 493 msg = _('cannot specify filename with --changelog or --manifest')
494 494 elif not repo:
495 495 msg = _('cannot specify --changelog or --manifest or --dir '
496 496 'without a repository')
497 497 if msg:
498 498 raise util.Abort(msg)
499 499
500 500 r = None
501 501 if repo:
502 502 if cl:
503 503 r = repo.unfiltered().changelog
504 504 elif dir:
505 505 if 'treemanifest' not in repo.requirements:
506 506 raise util.Abort(_("--dir can only be used on repos with "
507 507 "treemanifest enabled"))
508 508 dirlog = repo.dirlog(file_)
509 509 if len(dirlog):
510 510 r = dirlog
511 511 elif mf:
512 512 r = repo.manifest
513 513 elif file_:
514 514 filelog = repo.file(file_)
515 515 if len(filelog):
516 516 r = filelog
517 517 if not r:
518 518 if not file_:
519 519 raise error.CommandError(cmd, _('invalid arguments'))
520 520 if not os.path.isfile(file_):
521 521 raise util.Abort(_("revlog '%s' not found") % file_)
522 522 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
523 523 file_[:-2] + ".i")
524 524 return r
525 525
526 526 def copy(ui, repo, pats, opts, rename=False):
527 527 # called with the repo lock held
528 528 #
529 529 # hgsep => pathname that uses "/" to separate directories
530 530 # ossep => pathname that uses os.sep to separate directories
531 531 cwd = repo.getcwd()
532 532 targets = {}
533 533 after = opts.get("after")
534 534 dryrun = opts.get("dry_run")
535 535 wctx = repo[None]
536 536
537 537 def walkpat(pat):
538 538 srcs = []
539 539 if after:
540 540 badstates = '?'
541 541 else:
542 542 badstates = '?r'
543 543 m = scmutil.match(repo[None], [pat], opts, globbed=True)
544 544 for abs in repo.walk(m):
545 545 state = repo.dirstate[abs]
546 546 rel = m.rel(abs)
547 547 exact = m.exact(abs)
548 548 if state in badstates:
549 549 if exact and state == '?':
550 550 ui.warn(_('%s: not copying - file is not managed\n') % rel)
551 551 if exact and state == 'r':
552 552 ui.warn(_('%s: not copying - file has been marked for'
553 553 ' remove\n') % rel)
554 554 continue
555 555 # abs: hgsep
556 556 # rel: ossep
557 557 srcs.append((abs, rel, exact))
558 558 return srcs
559 559
560 560 # abssrc: hgsep
561 561 # relsrc: ossep
562 562 # otarget: ossep
563 563 def copyfile(abssrc, relsrc, otarget, exact):
564 564 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
565 565 if '/' in abstarget:
566 566 # We cannot normalize abstarget itself, as this would prevent
567 567 # case-only renames, like a => A.
568 568 abspath, absname = abstarget.rsplit('/', 1)
569 569 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
570 570 reltarget = repo.pathto(abstarget, cwd)
571 571 target = repo.wjoin(abstarget)
572 572 src = repo.wjoin(abssrc)
573 573 state = repo.dirstate[abstarget]
574 574
575 575 scmutil.checkportable(ui, abstarget)
576 576
577 577 # check for collisions
578 578 prevsrc = targets.get(abstarget)
579 579 if prevsrc is not None:
580 580 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
581 581 (reltarget, repo.pathto(abssrc, cwd),
582 582 repo.pathto(prevsrc, cwd)))
583 583 return
584 584
585 585 # check for overwrites
586 586 exists = os.path.lexists(target)
587 587 samefile = False
588 588 if exists and abssrc != abstarget:
589 589 if (repo.dirstate.normalize(abssrc) ==
590 590 repo.dirstate.normalize(abstarget)):
591 591 if not rename:
592 592 ui.warn(_("%s: can't copy - same file\n") % reltarget)
593 593 return
594 594 exists = False
595 595 samefile = True
596 596
597 597 if not after and exists or after and state in 'mn':
598 598 if not opts['force']:
599 599 ui.warn(_('%s: not overwriting - file exists\n') %
600 600 reltarget)
601 601 return
602 602
603 603 if after:
604 604 if not exists:
605 605 if rename:
606 606 ui.warn(_('%s: not recording move - %s does not exist\n') %
607 607 (relsrc, reltarget))
608 608 else:
609 609 ui.warn(_('%s: not recording copy - %s does not exist\n') %
610 610 (relsrc, reltarget))
611 611 return
612 612 elif not dryrun:
613 613 try:
614 614 if exists:
615 615 os.unlink(target)
616 616 targetdir = os.path.dirname(target) or '.'
617 617 if not os.path.isdir(targetdir):
618 618 os.makedirs(targetdir)
619 619 if samefile:
620 620 tmp = target + "~hgrename"
621 621 os.rename(src, tmp)
622 622 os.rename(tmp, target)
623 623 else:
624 624 util.copyfile(src, target)
625 625 srcexists = True
626 626 except IOError as inst:
627 627 if inst.errno == errno.ENOENT:
628 628 ui.warn(_('%s: deleted in working directory\n') % relsrc)
629 629 srcexists = False
630 630 else:
631 631 ui.warn(_('%s: cannot copy - %s\n') %
632 632 (relsrc, inst.strerror))
633 633 return True # report a failure
634 634
635 635 if ui.verbose or not exact:
636 636 if rename:
637 637 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
638 638 else:
639 639 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
640 640
641 641 targets[abstarget] = abssrc
642 642
643 643 # fix up dirstate
644 644 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
645 645 dryrun=dryrun, cwd=cwd)
646 646 if rename and not dryrun:
647 647 if not after and srcexists and not samefile:
648 648 util.unlinkpath(repo.wjoin(abssrc))
649 649 wctx.forget([abssrc])
650 650
651 651 # pat: ossep
652 652 # dest ossep
653 653 # srcs: list of (hgsep, hgsep, ossep, bool)
654 654 # return: function that takes hgsep and returns ossep
655 655 def targetpathfn(pat, dest, srcs):
656 656 if os.path.isdir(pat):
657 657 abspfx = pathutil.canonpath(repo.root, cwd, pat)
658 658 abspfx = util.localpath(abspfx)
659 659 if destdirexists:
660 660 striplen = len(os.path.split(abspfx)[0])
661 661 else:
662 662 striplen = len(abspfx)
663 663 if striplen:
664 664 striplen += len(os.sep)
665 665 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
666 666 elif destdirexists:
667 667 res = lambda p: os.path.join(dest,
668 668 os.path.basename(util.localpath(p)))
669 669 else:
670 670 res = lambda p: dest
671 671 return res
672 672
673 673 # pat: ossep
674 674 # dest ossep
675 675 # srcs: list of (hgsep, hgsep, ossep, bool)
676 676 # return: function that takes hgsep and returns ossep
677 677 def targetpathafterfn(pat, dest, srcs):
678 678 if matchmod.patkind(pat):
679 679 # a mercurial pattern
680 680 res = lambda p: os.path.join(dest,
681 681 os.path.basename(util.localpath(p)))
682 682 else:
683 683 abspfx = pathutil.canonpath(repo.root, cwd, pat)
684 684 if len(abspfx) < len(srcs[0][0]):
685 685 # A directory. Either the target path contains the last
686 686 # component of the source path or it does not.
687 687 def evalpath(striplen):
688 688 score = 0
689 689 for s in srcs:
690 690 t = os.path.join(dest, util.localpath(s[0])[striplen:])
691 691 if os.path.lexists(t):
692 692 score += 1
693 693 return score
694 694
695 695 abspfx = util.localpath(abspfx)
696 696 striplen = len(abspfx)
697 697 if striplen:
698 698 striplen += len(os.sep)
699 699 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
700 700 score = evalpath(striplen)
701 701 striplen1 = len(os.path.split(abspfx)[0])
702 702 if striplen1:
703 703 striplen1 += len(os.sep)
704 704 if evalpath(striplen1) > score:
705 705 striplen = striplen1
706 706 res = lambda p: os.path.join(dest,
707 707 util.localpath(p)[striplen:])
708 708 else:
709 709 # a file
710 710 if destdirexists:
711 711 res = lambda p: os.path.join(dest,
712 712 os.path.basename(util.localpath(p)))
713 713 else:
714 714 res = lambda p: dest
715 715 return res
716 716
717 717 pats = scmutil.expandpats(pats)
718 718 if not pats:
719 719 raise util.Abort(_('no source or destination specified'))
720 720 if len(pats) == 1:
721 721 raise util.Abort(_('no destination specified'))
722 722 dest = pats.pop()
723 723 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
724 724 if not destdirexists:
725 725 if len(pats) > 1 or matchmod.patkind(pats[0]):
726 726 raise util.Abort(_('with multiple sources, destination must be an '
727 727 'existing directory'))
728 728 if util.endswithsep(dest):
729 729 raise util.Abort(_('destination %s is not a directory') % dest)
730 730
731 731 tfn = targetpathfn
732 732 if after:
733 733 tfn = targetpathafterfn
734 734 copylist = []
735 735 for pat in pats:
736 736 srcs = walkpat(pat)
737 737 if not srcs:
738 738 continue
739 739 copylist.append((tfn(pat, dest, srcs), srcs))
740 740 if not copylist:
741 741 raise util.Abort(_('no files to copy'))
742 742
743 743 errors = 0
744 744 for targetpath, srcs in copylist:
745 745 for abssrc, relsrc, exact in srcs:
746 746 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
747 747 errors += 1
748 748
749 749 if errors:
750 750 ui.warn(_('(consider using --after)\n'))
751 751
752 752 return errors != 0
753 753
754 754 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
755 755 runargs=None, appendpid=False):
756 756 '''Run a command as a service.'''
757 757
758 758 def writepid(pid):
759 759 if opts['pid_file']:
760 760 if appendpid:
761 761 mode = 'a'
762 762 else:
763 763 mode = 'w'
764 764 fp = open(opts['pid_file'], mode)
765 765 fp.write(str(pid) + '\n')
766 766 fp.close()
767 767
768 768 if opts['daemon'] and not opts['daemon_pipefds']:
769 769 # Signal child process startup with file removal
770 770 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
771 771 os.close(lockfd)
772 772 try:
773 773 if not runargs:
774 774 runargs = util.hgcmd() + sys.argv[1:]
775 775 runargs.append('--daemon-pipefds=%s' % lockpath)
776 776 # Don't pass --cwd to the child process, because we've already
777 777 # changed directory.
778 778 for i in xrange(1, len(runargs)):
779 779 if runargs[i].startswith('--cwd='):
780 780 del runargs[i]
781 781 break
782 782 elif runargs[i].startswith('--cwd'):
783 783 del runargs[i:i + 2]
784 784 break
785 785 def condfn():
786 786 return not os.path.exists(lockpath)
787 787 pid = util.rundetached(runargs, condfn)
788 788 if pid < 0:
789 789 raise util.Abort(_('child process failed to start'))
790 790 writepid(pid)
791 791 finally:
792 792 try:
793 793 os.unlink(lockpath)
794 794 except OSError as e:
795 795 if e.errno != errno.ENOENT:
796 796 raise
797 797 if parentfn:
798 798 return parentfn(pid)
799 799 else:
800 800 return
801 801
802 802 if initfn:
803 803 initfn()
804 804
805 805 if not opts['daemon']:
806 806 writepid(os.getpid())
807 807
808 808 if opts['daemon_pipefds']:
809 809 lockpath = opts['daemon_pipefds']
810 810 try:
811 811 os.setsid()
812 812 except AttributeError:
813 813 pass
814 814 os.unlink(lockpath)
815 815 util.hidewindow()
816 816 sys.stdout.flush()
817 817 sys.stderr.flush()
818 818
819 819 nullfd = os.open(os.devnull, os.O_RDWR)
820 820 logfilefd = nullfd
821 821 if logfile:
822 822 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
823 823 os.dup2(nullfd, 0)
824 824 os.dup2(logfilefd, 1)
825 825 os.dup2(logfilefd, 2)
826 826 if nullfd not in (0, 1, 2):
827 827 os.close(nullfd)
828 828 if logfile and logfilefd not in (0, 1, 2):
829 829 os.close(logfilefd)
830 830
831 831 if runfn:
832 832 return runfn()
833 833
834 834 ## facility to let extensions process additional data into an import patch
835 835 # list of identifiers to be executed in order
836 836 extrapreimport = [] # run before commit
837 837 extrapostimport = [] # run after commit
838 838 # mapping from identifier to actual import function
839 839 #
840 840 # 'preimport' are run before the commit is made and are provided the following
841 841 # arguments:
842 842 # - repo: the localrepository instance,
843 843 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
844 844 # - extra: the future extra dictionary of the changeset, please mutate it,
845 845 # - opts: the import options.
846 846 # XXX ideally, we would just pass a ctx ready to be computed, that would allow
847 847 # mutation of the in-memory commit and more. Feel free to rework the code to get
848 848 # there.
849 849 extrapreimportmap = {}
850 850 # 'postimport' are run after the commit is made and are provided the following
851 851 # argument:
852 852 # - ctx: the changectx created by import.
853 853 extrapostimportmap = {}
854 854
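The comment block above documents the import hook points; as a hypothetical sketch of how an extension might use them (the extension-side names are made up, only the argument list and the two registries follow the contract described above):

    # extension module code, not part of cmdutil.py itself
    from mercurial import cmdutil

    def _stampsource(repo, patchdata, extra, opts):
        # runs before the commit is made: copy the patch's node id, if any,
        # into the future changeset's extra dict
        nodeid = patchdata.get('nodeid')
        if nodeid:
            extra['imported-from'] = nodeid

    cmdutil.extrapreimport.append('stampsource')
    cmdutil.extrapreimportmap['stampsource'] = _stampsource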
855 855 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
856 856 """Utility function used by commands.import to import a single patch
857 857
858 858 This function is explicitly defined here to help the evolve extension to
859 859 wrap this part of the import logic.
860 860
861 861 The API is currently a bit ugly because it is a simple code translation from
862 862 the import command. Feel free to make it better.
863 863
864 864 :hunk: a patch (as a binary string)
865 865 :parents: nodes that will be parents of the created commit
866 866 :opts: the full dict of options passed to the import command
867 867 :msgs: list to save the commit message to.
868 868 (used in case we need to save it when failing)
869 869 :updatefunc: a function that updates a repo to a given node
870 870 updatefunc(<repo>, <node>)
871 871 """
872 872 # avoid cycle context -> subrepo -> cmdutil
873 873 import context
874 874 extractdata = patch.extract(ui, hunk)
875 875 tmpname = extractdata.get('filename')
876 876 message = extractdata.get('message')
877 877 user = extractdata.get('user')
878 878 date = extractdata.get('date')
879 879 branch = extractdata.get('branch')
880 880 nodeid = extractdata.get('nodeid')
881 881 p1 = extractdata.get('p1')
882 882 p2 = extractdata.get('p2')
883 883
884 884 update = not opts.get('bypass')
885 885 strip = opts["strip"]
886 886 prefix = opts["prefix"]
887 887 sim = float(opts.get('similarity') or 0)
888 888 if not tmpname:
889 889 return (None, None, False)
890 890 msg = _('applied to working directory')
891 891
892 892 rejects = False
893 dsguard = None
894 893
895 894 try:
896 895 cmdline_message = logmessage(ui, opts)
897 896 if cmdline_message:
898 897 # pickup the cmdline msg
899 898 message = cmdline_message
900 899 elif message:
901 900 # pickup the patch msg
902 901 message = message.strip()
903 902 else:
904 903 # launch the editor
905 904 message = None
906 905 ui.debug('message:\n%s\n' % message)
907 906
908 907 if len(parents) == 1:
909 908 parents.append(repo[nullid])
910 909 if opts.get('exact'):
911 910 if not nodeid or not p1:
912 911 raise util.Abort(_('not a Mercurial patch'))
913 912 p1 = repo[p1]
914 913 p2 = repo[p2 or nullid]
915 914 elif p2:
916 915 try:
917 916 p1 = repo[p1]
918 917 p2 = repo[p2]
919 918 # Without any options, consider p2 only if the
920 919 # patch is being applied on top of the recorded
921 920 # first parent.
922 921 if p1 != parents[0]:
923 922 p1 = parents[0]
924 923 p2 = repo[nullid]
925 924 except error.RepoError:
926 925 p1, p2 = parents
927 926 if p2.node() == nullid:
928 927 ui.warn(_("warning: import the patch as a normal revision\n"
929 928 "(use --exact to import the patch as a merge)\n"))
930 929 else:
931 930 p1, p2 = parents
932 931
933 932 n = None
934 933 if update:
935 dsguard = dirstateguard(repo, 'tryimportone')
936 934 if p1 != parents[0]:
937 935 updatefunc(repo, p1.node())
938 936 if p2 != parents[1]:
939 937 repo.setparents(p1.node(), p2.node())
940 938
941 939 if opts.get('exact') or opts.get('import_branch'):
942 940 repo.dirstate.setbranch(branch or 'default')
943 941
944 942 partial = opts.get('partial', False)
945 943 files = set()
946 944 try:
947 945 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
948 946 files=files, eolmode=None, similarity=sim / 100.0)
949 947 except patch.PatchError as e:
950 948 if not partial:
951 949 raise util.Abort(str(e))
952 950 if partial:
953 951 rejects = True
954 952
955 953 files = list(files)
956 954 if opts.get('no_commit'):
957 955 if message:
958 956 msgs.append(message)
959 957 else:
960 958 if opts.get('exact') or p2:
961 959 # If you got here, you either use --force and know what
962 960 # you are doing or used --exact or a merge patch while
963 961 # being updated to its first parent.
964 962 m = None
965 963 else:
966 964 m = scmutil.matchfiles(repo, files or [])
967 965 editform = mergeeditform(repo[None], 'import.normal')
968 966 if opts.get('exact'):
969 967 editor = None
970 968 else:
971 969 editor = getcommiteditor(editform=editform, **opts)
972 970 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
973 971 extra = {}
974 972 for idfunc in extrapreimport:
975 973 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
976 974 try:
977 975 if partial:
978 976 repo.ui.setconfig('ui', 'allowemptycommit', True)
979 977 n = repo.commit(message, opts.get('user') or user,
980 978 opts.get('date') or date, match=m,
981 979 editor=editor, extra=extra)
982 980 for idfunc in extrapostimport:
983 981 extrapostimportmap[idfunc](repo[n])
984 982 finally:
985 983 repo.ui.restoreconfig(allowemptyback)
986 dsguard.close()
987 984 else:
988 985 if opts.get('exact') or opts.get('import_branch'):
989 986 branch = branch or 'default'
990 987 else:
991 988 branch = p1.branch()
992 989 store = patch.filestore()
993 990 try:
994 991 files = set()
995 992 try:
996 993 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
997 994 files, eolmode=None)
998 995 except patch.PatchError as e:
999 996 raise util.Abort(str(e))
1000 997 if opts.get('exact'):
1001 998 editor = None
1002 999 else:
1003 1000 editor = getcommiteditor(editform='import.bypass')
1004 1001 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1005 1002 message,
1006 1003 opts.get('user') or user,
1007 1004 opts.get('date') or date,
1008 1005 branch, files, store,
1009 1006 editor=editor)
1010 1007 n = memctx.commit()
1011 1008 finally:
1012 1009 store.close()
1013 1010 if opts.get('exact') and opts.get('no_commit'):
1014 1011 # --exact with --no-commit is still useful in that it does merge
1015 1012 # and branch bits
1016 1013 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1017 1014 elif opts.get('exact') and hex(n) != nodeid:
1018 1015 raise util.Abort(_('patch is damaged or loses information'))
1019 1016 if n:
1020 1017 # i18n: refers to a short changeset id
1021 1018 msg = _('created %s') % short(n)
1022 1019 return (msg, n, rejects)
1023 1020 finally:
1024 lockmod.release(dsguard)
1025 1021 os.unlink(tmpname)
1026 1022
1027 1023 # facility to let extensions include additional data in an exported patch
1028 1024 # list of identifiers to be executed in order
1029 1025 extraexport = []
1030 1026 # mapping from identifier to actual export function
1031 1027 # function has to return a string to be added to the header or None
1032 1028 # it is given two arguments (sequencenumber, changectx)
1033 1029 extraexportmap = {}
1034 1030
1035 1031 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1036 1032 opts=None, match=None):
1037 1033 '''export changesets as hg patches.'''
1038 1034
1039 1035 total = len(revs)
1040 1036 revwidth = max([len(str(rev)) for rev in revs])
1041 1037 filemode = {}
1042 1038
1043 1039 def single(rev, seqno, fp):
1044 1040 ctx = repo[rev]
1045 1041 node = ctx.node()
1046 1042 parents = [p.node() for p in ctx.parents() if p]
1047 1043 branch = ctx.branch()
1048 1044 if switch_parent:
1049 1045 parents.reverse()
1050 1046
1051 1047 if parents:
1052 1048 prev = parents[0]
1053 1049 else:
1054 1050 prev = nullid
1055 1051
1056 1052 shouldclose = False
1057 1053 if not fp and len(template) > 0:
1058 1054 desc_lines = ctx.description().rstrip().split('\n')
1059 1055 desc = desc_lines[0] #Commit always has a first line.
1060 1056 fp = makefileobj(repo, template, node, desc=desc, total=total,
1061 1057 seqno=seqno, revwidth=revwidth, mode='wb',
1062 1058 modemap=filemode)
1063 1059 if fp != template:
1064 1060 shouldclose = True
1065 1061 if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
1066 1062 repo.ui.note("%s\n" % fp.name)
1067 1063
1068 1064 if not fp:
1069 1065 write = repo.ui.write
1070 1066 else:
1071 1067 def write(s, **kw):
1072 1068 fp.write(s)
1073 1069
1074 1070 write("# HG changeset patch\n")
1075 1071 write("# User %s\n" % ctx.user())
1076 1072 write("# Date %d %d\n" % ctx.date())
1077 1073 write("# %s\n" % util.datestr(ctx.date()))
1078 1074 if branch and branch != 'default':
1079 1075 write("# Branch %s\n" % branch)
1080 1076 write("# Node ID %s\n" % hex(node))
1081 1077 write("# Parent %s\n" % hex(prev))
1082 1078 if len(parents) > 1:
1083 1079 write("# Parent %s\n" % hex(parents[1]))
1084 1080
1085 1081 for headerid in extraexport:
1086 1082 header = extraexportmap[headerid](seqno, ctx)
1087 1083 if header is not None:
1088 1084 write('# %s\n' % header)
1089 1085 write(ctx.description().rstrip())
1090 1086 write("\n\n")
1091 1087
1092 1088 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1093 1089 write(chunk, label=label)
1094 1090
1095 1091 if shouldclose:
1096 1092 fp.close()
1097 1093
1098 1094 for seqno, rev in enumerate(revs):
1099 1095 single(rev, seqno + 1, fp)
1100 1096
1101 1097 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1102 1098 changes=None, stat=False, fp=None, prefix='',
1103 1099 root='', listsubrepos=False):
1104 1100 '''show diff or diffstat.'''
1105 1101 if fp is None:
1106 1102 write = ui.write
1107 1103 else:
1108 1104 def write(s, **kw):
1109 1105 fp.write(s)
1110 1106
1111 1107 if root:
1112 1108 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1113 1109 else:
1114 1110 relroot = ''
1115 1111 if relroot != '':
1116 1112 # XXX relative roots currently don't work if the root is within a
1117 1113 # subrepo
1118 1114 uirelroot = match.uipath(relroot)
1119 1115 relroot += '/'
1120 1116 for matchroot in match.files():
1121 1117 if not matchroot.startswith(relroot):
1122 1118 ui.warn(_('warning: %s not inside relative root %s\n') % (
1123 1119 match.uipath(matchroot), uirelroot))
1124 1120
1125 1121 if stat:
1126 1122 diffopts = diffopts.copy(context=0)
1127 1123 width = 80
1128 1124 if not ui.plain():
1129 1125 width = ui.termwidth()
1130 1126 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1131 1127 prefix=prefix, relroot=relroot)
1132 1128 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1133 1129 width=width,
1134 1130 git=diffopts.git):
1135 1131 write(chunk, label=label)
1136 1132 else:
1137 1133 for chunk, label in patch.diffui(repo, node1, node2, match,
1138 1134 changes, diffopts, prefix=prefix,
1139 1135 relroot=relroot):
1140 1136 write(chunk, label=label)
1141 1137
1142 1138 if listsubrepos:
1143 1139 ctx1 = repo[node1]
1144 1140 ctx2 = repo[node2]
1145 1141 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1146 1142 tempnode2 = node2
1147 1143 try:
1148 1144 if node2 is not None:
1149 1145 tempnode2 = ctx2.substate[subpath][1]
1150 1146 except KeyError:
1151 1147 # A subrepo that existed in node1 was deleted between node1 and
1152 1148 # node2 (inclusive). Thus, ctx2's substate won't contain that
1153 1149 # subpath. The best we can do is to ignore it.
1154 1150 tempnode2 = None
1155 1151 submatch = matchmod.narrowmatcher(subpath, match)
1156 1152 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1157 1153 stat=stat, fp=fp, prefix=prefix)
1158 1154
1159 1155 class changeset_printer(object):
1160 1156 '''show changeset information when templating is not requested.'''
1161 1157
1162 1158 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1163 1159 self.ui = ui
1164 1160 self.repo = repo
1165 1161 self.buffered = buffered
1166 1162 self.matchfn = matchfn
1167 1163 self.diffopts = diffopts
1168 1164 self.header = {}
1169 1165 self.hunk = {}
1170 1166 self.lastheader = None
1171 1167 self.footer = None
1172 1168
1173 1169 def flush(self, ctx):
1174 1170 rev = ctx.rev()
1175 1171 if rev in self.header:
1176 1172 h = self.header[rev]
1177 1173 if h != self.lastheader:
1178 1174 self.lastheader = h
1179 1175 self.ui.write(h)
1180 1176 del self.header[rev]
1181 1177 if rev in self.hunk:
1182 1178 self.ui.write(self.hunk[rev])
1183 1179 del self.hunk[rev]
1184 1180 return 1
1185 1181 return 0
1186 1182
1187 1183 def close(self):
1188 1184 if self.footer:
1189 1185 self.ui.write(self.footer)
1190 1186
1191 1187 def show(self, ctx, copies=None, matchfn=None, **props):
1192 1188 if self.buffered:
1193 1189 self.ui.pushbuffer()
1194 1190 self._show(ctx, copies, matchfn, props)
1195 1191 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
1196 1192 else:
1197 1193 self._show(ctx, copies, matchfn, props)
1198 1194
1199 1195 def _show(self, ctx, copies, matchfn, props):
1200 1196 '''show a single changeset or file revision'''
1201 1197 changenode = ctx.node()
1202 1198 rev = ctx.rev()
1203 1199 if self.ui.debugflag:
1204 1200 hexfunc = hex
1205 1201 else:
1206 1202 hexfunc = short
1207 1203 # as of now, wctx.node() and wctx.rev() return None, but we want to
1208 1204 # show the same values as {node} and {rev} templatekw
1209 1205 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1210 1206
1211 1207 if self.ui.quiet:
1212 1208 self.ui.write("%d:%s\n" % revnode, label='log.node')
1213 1209 return
1214 1210
1215 1211 date = util.datestr(ctx.date())
1216 1212
1217 1213 # i18n: column positioning for "hg log"
1218 1214 self.ui.write(_("changeset: %d:%s\n") % revnode,
1219 1215 label='log.changeset changeset.%s' % ctx.phasestr())
1220 1216
1221 1217 # branches are shown first before any other names due to backwards
1222 1218 # compatibility
1223 1219 branch = ctx.branch()
1224 1220 # don't show the default branch name
1225 1221 if branch != 'default':
1226 1222 # i18n: column positioning for "hg log"
1227 1223 self.ui.write(_("branch: %s\n") % branch,
1228 1224 label='log.branch')
1229 1225
1230 1226 for name, ns in self.repo.names.iteritems():
1231 1227 # branches has special logic already handled above, so here we just
1232 1228 # skip it
1233 1229 if name == 'branches':
1234 1230 continue
1235 1231 # we will use the templatename as the color name since those two
1236 1232 # should be the same
1237 1233 for name in ns.names(self.repo, changenode):
1238 1234 self.ui.write(ns.logfmt % name,
1239 1235 label='log.%s' % ns.colorname)
1240 1236 if self.ui.debugflag:
1241 1237 # i18n: column positioning for "hg log"
1242 1238 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1243 1239 label='log.phase')
1244 1240 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1245 1241 label = 'log.parent changeset.%s' % pctx.phasestr()
1246 1242 # i18n: column positioning for "hg log"
1247 1243 self.ui.write(_("parent: %d:%s\n")
1248 1244 % (pctx.rev(), hexfunc(pctx.node())),
1249 1245 label=label)
1250 1246
1251 1247 if self.ui.debugflag and rev is not None:
1252 1248 mnode = ctx.manifestnode()
1253 1249 # i18n: column positioning for "hg log"
1254 1250 self.ui.write(_("manifest: %d:%s\n") %
1255 1251 (self.repo.manifest.rev(mnode), hex(mnode)),
1256 1252 label='ui.debug log.manifest')
1257 1253 # i18n: column positioning for "hg log"
1258 1254 self.ui.write(_("user: %s\n") % ctx.user(),
1259 1255 label='log.user')
1260 1256 # i18n: column positioning for "hg log"
1261 1257 self.ui.write(_("date: %s\n") % date,
1262 1258 label='log.date')
1263 1259
1264 1260 if self.ui.debugflag:
1265 1261 files = ctx.p1().status(ctx)[:3]
1266 1262 for key, value in zip([# i18n: column positioning for "hg log"
1267 1263 _("files:"),
1268 1264 # i18n: column positioning for "hg log"
1269 1265 _("files+:"),
1270 1266 # i18n: column positioning for "hg log"
1271 1267 _("files-:")], files):
1272 1268 if value:
1273 1269 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1274 1270 label='ui.debug log.files')
1275 1271 elif ctx.files() and self.ui.verbose:
1276 1272 # i18n: column positioning for "hg log"
1277 1273 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1278 1274 label='ui.note log.files')
1279 1275 if copies and self.ui.verbose:
1280 1276 copies = ['%s (%s)' % c for c in copies]
1281 1277 # i18n: column positioning for "hg log"
1282 1278 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1283 1279 label='ui.note log.copies')
1284 1280
1285 1281 extra = ctx.extra()
1286 1282 if extra and self.ui.debugflag:
1287 1283 for key, value in sorted(extra.items()):
1288 1284 # i18n: column positioning for "hg log"
1289 1285 self.ui.write(_("extra: %s=%s\n")
1290 1286 % (key, value.encode('string_escape')),
1291 1287 label='ui.debug log.extra')
1292 1288
1293 1289 description = ctx.description().strip()
1294 1290 if description:
1295 1291 if self.ui.verbose:
1296 1292 self.ui.write(_("description:\n"),
1297 1293 label='ui.note log.description')
1298 1294 self.ui.write(description,
1299 1295 label='ui.note log.description')
1300 1296 self.ui.write("\n\n")
1301 1297 else:
1302 1298 # i18n: column positioning for "hg log"
1303 1299 self.ui.write(_("summary: %s\n") %
1304 1300 description.splitlines()[0],
1305 1301 label='log.summary')
1306 1302 self.ui.write("\n")
1307 1303
1308 1304 self.showpatch(changenode, matchfn)
1309 1305
1310 1306 def showpatch(self, node, matchfn):
1311 1307 if not matchfn:
1312 1308 matchfn = self.matchfn
1313 1309 if matchfn:
1314 1310 stat = self.diffopts.get('stat')
1315 1311 diff = self.diffopts.get('patch')
1316 1312 diffopts = patch.diffallopts(self.ui, self.diffopts)
1317 1313 prev = self.repo.changelog.parents(node)[0]
1318 1314 if stat:
1319 1315 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1320 1316 match=matchfn, stat=True)
1321 1317 if diff:
1322 1318 if stat:
1323 1319 self.ui.write("\n")
1324 1320 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1325 1321 match=matchfn, stat=False)
1326 1322 self.ui.write("\n")
1327 1323
1328 1324 class jsonchangeset(changeset_printer):
1329 1325 '''format changeset information.'''
1330 1326
1331 1327 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1332 1328 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1333 1329 self.cache = {}
1334 1330 self._first = True
1335 1331
1336 1332 def close(self):
1337 1333 if not self._first:
1338 1334 self.ui.write("\n]\n")
1339 1335 else:
1340 1336 self.ui.write("[]\n")
1341 1337
1342 1338 def _show(self, ctx, copies, matchfn, props):
1343 1339 '''show a single changeset or file revision'''
1344 1340 rev = ctx.rev()
1345 1341 if rev is None:
1346 1342 jrev = jnode = 'null'
1347 1343 else:
1348 1344 jrev = str(rev)
1349 1345 jnode = '"%s"' % hex(ctx.node())
1350 1346 j = encoding.jsonescape
1351 1347
1352 1348 if self._first:
1353 1349 self.ui.write("[\n {")
1354 1350 self._first = False
1355 1351 else:
1356 1352 self.ui.write(",\n {")
1357 1353
1358 1354 if self.ui.quiet:
1359 1355 self.ui.write('\n "rev": %s' % jrev)
1360 1356 self.ui.write(',\n "node": %s' % jnode)
1361 1357 self.ui.write('\n }')
1362 1358 return
1363 1359
1364 1360 self.ui.write('\n "rev": %s' % jrev)
1365 1361 self.ui.write(',\n "node": %s' % jnode)
1366 1362 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1367 1363 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1368 1364 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1369 1365 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1370 1366 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1371 1367
1372 1368 self.ui.write(',\n "bookmarks": [%s]' %
1373 1369 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1374 1370 self.ui.write(',\n "tags": [%s]' %
1375 1371 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1376 1372 self.ui.write(',\n "parents": [%s]' %
1377 1373 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1378 1374
1379 1375 if self.ui.debugflag:
1380 1376 if rev is None:
1381 1377 jmanifestnode = 'null'
1382 1378 else:
1383 1379 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1384 1380 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1385 1381
1386 1382 self.ui.write(',\n "extra": {%s}' %
1387 1383 ", ".join('"%s": "%s"' % (j(k), j(v))
1388 1384 for k, v in ctx.extra().items()))
1389 1385
1390 1386 files = ctx.p1().status(ctx)
1391 1387 self.ui.write(',\n "modified": [%s]' %
1392 1388 ", ".join('"%s"' % j(f) for f in files[0]))
1393 1389 self.ui.write(',\n "added": [%s]' %
1394 1390 ", ".join('"%s"' % j(f) for f in files[1]))
1395 1391 self.ui.write(',\n "removed": [%s]' %
1396 1392 ", ".join('"%s"' % j(f) for f in files[2]))
1397 1393
1398 1394 elif self.ui.verbose:
1399 1395 self.ui.write(',\n "files": [%s]' %
1400 1396 ", ".join('"%s"' % j(f) for f in ctx.files()))
1401 1397
1402 1398 if copies:
1403 1399 self.ui.write(',\n "copies": {%s}' %
1404 1400 ", ".join('"%s": "%s"' % (j(k), j(v))
1405 1401 for k, v in copies))
1406 1402
1407 1403 matchfn = self.matchfn
1408 1404 if matchfn:
1409 1405 stat = self.diffopts.get('stat')
1410 1406 diff = self.diffopts.get('patch')
1411 1407 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1412 1408 node, prev = ctx.node(), ctx.p1().node()
1413 1409 if stat:
1414 1410 self.ui.pushbuffer()
1415 1411 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1416 1412 match=matchfn, stat=True)
1417 1413 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1418 1414 if diff:
1419 1415 self.ui.pushbuffer()
1420 1416 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1421 1417 match=matchfn, stat=False)
1422 1418 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1423 1419
1424 1420 self.ui.write("\n }")
1425 1421
1426 1422 class changeset_templater(changeset_printer):
1427 1423 '''format changeset information.'''
1428 1424
1429 1425 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1430 1426 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1431 1427 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1432 1428 defaulttempl = {
1433 1429 'parent': '{rev}:{node|formatnode} ',
1434 1430 'manifest': '{rev}:{node|formatnode}',
1435 1431 'file_copy': '{name} ({source})',
1436 1432 'extra': '{key}={value|stringescape}'
1437 1433 }
1438 1434 # filecopy is preserved for compatibility reasons
1439 1435 defaulttempl['filecopy'] = defaulttempl['file_copy']
1440 1436 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1441 1437 cache=defaulttempl)
1442 1438 if tmpl:
1443 1439 self.t.cache['changeset'] = tmpl
1444 1440
1445 1441 self.cache = {}
1446 1442
1447 1443 # find correct templates for current mode
1448 1444 tmplmodes = [
1449 1445 (True, None),
1450 1446 (self.ui.verbose, 'verbose'),
1451 1447 (self.ui.quiet, 'quiet'),
1452 1448 (self.ui.debugflag, 'debug'),
1453 1449 ]
1454 1450
1455 1451 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1456 1452 'docheader': '', 'docfooter': ''}
1457 1453 for mode, postfix in tmplmodes:
1458 1454 for t in self._parts:
1459 1455 cur = t
1460 1456 if postfix:
1461 1457 cur += "_" + postfix
1462 1458 if mode and cur in self.t:
1463 1459 self._parts[t] = cur
1464 1460
1465 1461 if self._parts['docheader']:
1466 1462 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1467 1463
1468 1464 def close(self):
1469 1465 if self._parts['docfooter']:
1470 1466 if not self.footer:
1471 1467 self.footer = ""
1472 1468 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1473 1469 return super(changeset_templater, self).close()
1474 1470
1475 1471 def _show(self, ctx, copies, matchfn, props):
1476 1472 '''show a single changeset or file revision'''
1477 1473 props = props.copy()
1478 1474 props.update(templatekw.keywords)
1479 1475 props['templ'] = self.t
1480 1476 props['ctx'] = ctx
1481 1477 props['repo'] = self.repo
1482 1478 props['revcache'] = {'copies': copies}
1483 1479 props['cache'] = self.cache
1484 1480
1485 1481 try:
1486 1482 # write header
1487 1483 if self._parts['header']:
1488 1484 h = templater.stringify(self.t(self._parts['header'], **props))
1489 1485 if self.buffered:
1490 1486 self.header[ctx.rev()] = h
1491 1487 else:
1492 1488 if self.lastheader != h:
1493 1489 self.lastheader = h
1494 1490 self.ui.write(h)
1495 1491
1496 1492 # write changeset metadata, then patch if requested
1497 1493 key = self._parts['changeset']
1498 1494 self.ui.write(templater.stringify(self.t(key, **props)))
1499 1495 self.showpatch(ctx.node(), matchfn)
1500 1496
1501 1497 if self._parts['footer']:
1502 1498 if not self.footer:
1503 1499 self.footer = templater.stringify(
1504 1500 self.t(self._parts['footer'], **props))
1505 1501 except KeyError as inst:
1506 1502 msg = _("%s: no key named '%s'")
1507 1503 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1508 1504 except SyntaxError as inst:
1509 1505 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1510 1506
1511 1507 def gettemplate(ui, tmpl, style):
1512 1508 """
1513 1509 Find the template matching the given template spec or style.
1514 1510 """
1515 1511
1516 1512 # ui settings
1517 1513 if not tmpl and not style: # templates are stronger than style
1518 1514 tmpl = ui.config('ui', 'logtemplate')
1519 1515 if tmpl:
1520 1516 try:
1521 1517 tmpl = templater.unquotestring(tmpl)
1522 1518 except SyntaxError:
1523 1519 pass
1524 1520 return tmpl, None
1525 1521 else:
1526 1522 style = util.expandpath(ui.config('ui', 'style', ''))
1527 1523
1528 1524 if not tmpl and style:
1529 1525 mapfile = style
1530 1526 if not os.path.split(mapfile)[0]:
1531 1527 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1532 1528 or templater.templatepath(mapfile))
1533 1529 if mapname:
1534 1530 mapfile = mapname
1535 1531 return None, mapfile
1536 1532
1537 1533 if not tmpl:
1538 1534 return None, None
1539 1535
1540 1536 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1541 1537
1542 1538 def show_changeset(ui, repo, opts, buffered=False):
1543 1539 """show one changeset using template or regular display.
1544 1540
1545 1541 Display format will be the first non-empty hit of:
1546 1542 1. option 'template'
1547 1543 2. option 'style'
1548 1544 3. [ui] setting 'logtemplate'
1549 1545 4. [ui] setting 'style'
1550 1546 If all of these values are either unset or the empty string,
1551 1547 regular display via changeset_printer() is done.
1552 1548 """
1553 1549 # options
1554 1550 matchfn = None
1555 1551 if opts.get('patch') or opts.get('stat'):
1556 1552 matchfn = scmutil.matchall(repo)
1557 1553
1558 1554 if opts.get('template') == 'json':
1559 1555 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1560 1556
1561 1557 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1562 1558
1563 1559 if not tmpl and not mapfile:
1564 1560 return changeset_printer(ui, repo, matchfn, opts, buffered)
1565 1561
1566 1562 try:
1567 1563 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1568 1564 buffered)
1569 1565 except SyntaxError as inst:
1570 1566 raise util.Abort(inst.args[0])
1571 1567 return t
1572 1568
1573 1569 def showmarker(ui, marker):
1574 1570 """utility function to display obsolescence marker in a readable way
1575 1571
1576 1572 To be used by debug functions."""
1577 1573 ui.write(hex(marker.precnode()))
1578 1574 for repl in marker.succnodes():
1579 1575 ui.write(' ')
1580 1576 ui.write(hex(repl))
1581 1577 ui.write(' %X ' % marker.flags())
1582 1578 parents = marker.parentnodes()
1583 1579 if parents is not None:
1584 1580 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1585 1581 ui.write('(%s) ' % util.datestr(marker.date()))
1586 1582 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1587 1583 sorted(marker.metadata().items())
1588 1584 if t[0] != 'date')))
1589 1585 ui.write('\n')
1590 1586
1591 1587 def finddate(ui, repo, date):
1592 1588 """Find the tipmost changeset that matches the given date spec"""
1593 1589
1594 1590 df = util.matchdate(date)
1595 1591 m = scmutil.matchall(repo)
1596 1592 results = {}
1597 1593
1598 1594 def prep(ctx, fns):
1599 1595 d = ctx.date()
1600 1596 if df(d[0]):
1601 1597 results[ctx.rev()] = d
1602 1598
1603 1599 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1604 1600 rev = ctx.rev()
1605 1601 if rev in results:
1606 1602 ui.status(_("found revision %s from %s\n") %
1607 1603 (rev, util.datestr(results[rev])))
1608 1604 return str(rev)
1609 1605
1610 1606 raise util.Abort(_("revision matching date not found"))
1611 1607
1612 1608 def increasingwindows(windowsize=8, sizelimit=512):
1613 1609 while True:
1614 1610 yield windowsize
1615 1611 if windowsize < sizelimit:
1616 1612 windowsize *= 2
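# Illustration: list(itertools.islice(increasingwindows(), 8)) yields
# [8, 16, 32, 64, 128, 256, 512, 512]; the window doubles until it reaches
# sizelimit and then stays there.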
1617 1613
1618 1614 class FileWalkError(Exception):
1619 1615 pass
1620 1616
1621 1617 def walkfilerevs(repo, match, follow, revs, fncache):
1622 1618 '''Walks the file history for the matched files.
1623 1619
1624 1620 Returns the changeset revs that are involved in the file history.
1625 1621
1626 1622 Throws FileWalkError if the file history can't be walked using
1627 1623 filelogs alone.
1628 1624 '''
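# The return value is a plain set of changelog revision numbers touching
# the matched files (for example set([0, 3, 7])); as a side effect,
# fncache is filled with rev -> [filenames] entries for those revisions.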
1629 1625 wanted = set()
1630 1626 copies = []
1631 1627 minrev, maxrev = min(revs), max(revs)
1632 1628 def filerevgen(filelog, last):
1633 1629 """
1634 1630 Only files, no patterns. Check the history of each file.
1635 1631
1636 1632 Examines filelog entries within minrev, maxrev linkrev range
1637 1633 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1638 1634 tuples in backwards order
1639 1635 """
1640 1636 cl_count = len(repo)
1641 1637 revs = []
1642 1638 for j in xrange(0, last + 1):
1643 1639 linkrev = filelog.linkrev(j)
1644 1640 if linkrev < minrev:
1645 1641 continue
1646 1642 # only yield revs for which we have the changelog; it can
1647 1643 # happen while doing "hg log" during a pull or commit
1648 1644 if linkrev >= cl_count:
1649 1645 break
1650 1646
1651 1647 parentlinkrevs = []
1652 1648 for p in filelog.parentrevs(j):
1653 1649 if p != nullrev:
1654 1650 parentlinkrevs.append(filelog.linkrev(p))
1655 1651 n = filelog.node(j)
1656 1652 revs.append((linkrev, parentlinkrevs,
1657 1653 follow and filelog.renamed(n)))
1658 1654
1659 1655 return reversed(revs)
1660 1656 def iterfiles():
1661 1657 pctx = repo['.']
1662 1658 for filename in match.files():
1663 1659 if follow:
1664 1660 if filename not in pctx:
1665 1661 raise util.Abort(_('cannot follow file not in parent '
1666 1662 'revision: "%s"') % filename)
1667 1663 yield filename, pctx[filename].filenode()
1668 1664 else:
1669 1665 yield filename, None
1670 1666 for filename_node in copies:
1671 1667 yield filename_node
1672 1668
1673 1669 for file_, node in iterfiles():
1674 1670 filelog = repo.file(file_)
1675 1671 if not len(filelog):
1676 1672 if node is None:
1677 1673 # A zero count may be a directory or deleted file, so
1678 1674 # try to find matching entries on the slow path.
1679 1675 if follow:
1680 1676 raise util.Abort(
1681 1677 _('cannot follow nonexistent file: "%s"') % file_)
1682 1678 raise FileWalkError("Cannot walk via filelog")
1683 1679 else:
1684 1680 continue
1685 1681
1686 1682 if node is None:
1687 1683 last = len(filelog) - 1
1688 1684 else:
1689 1685 last = filelog.rev(node)
1690 1686
1691 1687 # keep track of all ancestors of the file
1692 1688 ancestors = set([filelog.linkrev(last)])
1693 1689
1694 1690 # iterate from latest to oldest revision
1695 1691 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1696 1692 if not follow:
1697 1693 if rev > maxrev:
1698 1694 continue
1699 1695 else:
1700 1696 # Note that last might not be the first interesting
1701 1697 # rev to us:
1702 1698 # if the file has been changed after maxrev, we'll
1703 1699 # have linkrev(last) > maxrev, and we still need
1704 1700 # to explore the file graph
1705 1701 if rev not in ancestors:
1706 1702 continue
1707 1703 # XXX insert 1327 fix here
1708 1704 if flparentlinkrevs:
1709 1705 ancestors.update(flparentlinkrevs)
1710 1706
1711 1707 fncache.setdefault(rev, []).append(file_)
1712 1708 wanted.add(rev)
1713 1709 if copied:
1714 1710 copies.append(copied)
1715 1711
1716 1712 return wanted
1717 1713
1718 1714 class _followfilter(object):
1719 1715 def __init__(self, repo, onlyfirst=False):
1720 1716 self.repo = repo
1721 1717 self.startrev = nullrev
1722 1718 self.roots = set()
1723 1719 self.onlyfirst = onlyfirst
1724 1720
1725 1721 def match(self, rev):
1726 1722 def realparents(rev):
1727 1723 if self.onlyfirst:
1728 1724 return self.repo.changelog.parentrevs(rev)[0:1]
1729 1725 else:
1730 1726 return filter(lambda x: x != nullrev,
1731 1727 self.repo.changelog.parentrevs(rev))
1732 1728
1733 1729 if self.startrev == nullrev:
1734 1730 self.startrev = rev
1735 1731 return True
1736 1732
1737 1733 if rev > self.startrev:
1738 1734 # forward: all descendants
1739 1735 if not self.roots:
1740 1736 self.roots.add(self.startrev)
1741 1737 for parent in realparents(rev):
1742 1738 if parent in self.roots:
1743 1739 self.roots.add(rev)
1744 1740 return True
1745 1741 else:
1746 1742 # backwards: all parents
1747 1743 if not self.roots:
1748 1744 self.roots.update(realparents(self.startrev))
1749 1745 if rev in self.roots:
1750 1746 self.roots.remove(rev)
1751 1747 self.roots.update(realparents(rev))
1752 1748 return True
1753 1749
1754 1750 return False
1755 1751
1756 1752 def walkchangerevs(repo, match, opts, prepare):
1757 1753 '''Iterate over files and the revs in which they changed.
1758 1754
1759 1755 Callers most commonly need to iterate backwards over the history
1760 1756 in which they are interested. Doing so has awful (quadratic-looking)
1761 1757 performance, so we use iterators in a "windowed" way.
1762 1758
1763 1759 We walk a window of revisions in the desired order. Within the
1764 1760 window, we first walk forwards to gather data, then in the desired
1765 1761 order (usually backwards) to display it.
1766 1762
1767 1763 This function returns an iterator yielding contexts. Before
1768 1764 yielding each context, the iterator will first call the prepare
1769 1765 function on each context in the window in forward order.'''
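# A minimal usage sketch (finddate() above is a real caller): gather data
# in prepare() and consume the yielded changectxs afterwards.
#
#   def prep(ctx, fns):
#       pass            # inspect ctx and the matched file names
#   for ctx in walkchangerevs(repo, match, {'rev': None}, prep):
#       pass            # ctx is yielded in the requested (windowed) order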
1770 1766
1771 1767 follow = opts.get('follow') or opts.get('follow_first')
1772 1768 revs = _logrevs(repo, opts)
1773 1769 if not revs:
1774 1770 return []
1775 1771 wanted = set()
1776 1772 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1777 1773 opts.get('removed'))
1778 1774 fncache = {}
1779 1775 change = repo.changectx
1780 1776
1781 1777 # First step is to fill wanted, the set of revisions that we want to yield.
1782 1778 # When it does not induce extra cost, we also fill fncache for revisions in
1783 1779 # wanted: a cache of filenames that were changed (ctx.files()) and that
1784 1780 # match the file filtering conditions.
1785 1781
1786 1782 if match.always():
1787 1783 # No files, no patterns. Display all revs.
1788 1784 wanted = revs
1789 1785 elif not slowpath:
1790 1786 # We only have to read through the filelog to find wanted revisions
1791 1787
1792 1788 try:
1793 1789 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1794 1790 except FileWalkError:
1795 1791 slowpath = True
1796 1792
1797 1793 # We decided to fall back to the slowpath because at least one
1798 1794 # of the paths was not a file. Check to see if at least one of them
1799 1795 # existed in history, otherwise simply return
1800 1796 for path in match.files():
1801 1797 if path == '.' or path in repo.store:
1802 1798 break
1803 1799 else:
1804 1800 return []
1805 1801
1806 1802 if slowpath:
1807 1803 # We have to read the changelog to match filenames against
1808 1804 # changed files
1809 1805
1810 1806 if follow:
1811 1807 raise util.Abort(_('can only follow copies/renames for explicit '
1812 1808 'filenames'))
1813 1809
1814 1810 # The slow path checks files modified in every changeset.
1815 1811 # This is really slow on large repos, so compute the set lazily.
1816 1812 class lazywantedset(object):
1817 1813 def __init__(self):
1818 1814 self.set = set()
1819 1815 self.revs = set(revs)
1820 1816
1821 1817 # No need to worry about locality here because it will be accessed
1822 1818 # in the same order as the increasing window below.
1823 1819 def __contains__(self, value):
1824 1820 if value in self.set:
1825 1821 return True
1826 1822 elif not value in self.revs:
1827 1823 return False
1828 1824 else:
1829 1825 self.revs.discard(value)
1830 1826 ctx = change(value)
1831 1827 matches = filter(match, ctx.files())
1832 1828 if matches:
1833 1829 fncache[value] = matches
1834 1830 self.set.add(value)
1835 1831 return True
1836 1832 return False
1837 1833
1838 1834 def discard(self, value):
1839 1835 self.revs.discard(value)
1840 1836 self.set.discard(value)
1841 1837
1842 1838 wanted = lazywantedset()
1843 1839
1844 1840 # it might be worthwhile to do this in the iterator if the rev range
1845 1841 # is descending and the prune args are all within that range
1846 1842 for rev in opts.get('prune', ()):
1847 1843 rev = repo[rev].rev()
1848 1844 ff = _followfilter(repo)
1849 1845 stop = min(revs[0], revs[-1])
1850 1846 for x in xrange(rev, stop - 1, -1):
1851 1847 if ff.match(x):
1852 1848 wanted = wanted - [x]
1853 1849
1854 1850 # Now that wanted is correctly initialized, we can iterate over the
1855 1851 # revision range, yielding only revisions in wanted.
1856 1852 def iterate():
1857 1853 if follow and match.always():
1858 1854 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1859 1855 def want(rev):
1860 1856 return ff.match(rev) and rev in wanted
1861 1857 else:
1862 1858 def want(rev):
1863 1859 return rev in wanted
1864 1860
1865 1861 it = iter(revs)
1866 1862 stopiteration = False
1867 1863 for windowsize in increasingwindows():
1868 1864 nrevs = []
1869 1865 for i in xrange(windowsize):
1870 1866 rev = next(it, None)
1871 1867 if rev is None:
1872 1868 stopiteration = True
1873 1869 break
1874 1870 elif want(rev):
1875 1871 nrevs.append(rev)
1876 1872 for rev in sorted(nrevs):
1877 1873 fns = fncache.get(rev)
1878 1874 ctx = change(rev)
1879 1875 if not fns:
1880 1876 def fns_generator():
1881 1877 for f in ctx.files():
1882 1878 if match(f):
1883 1879 yield f
1884 1880 fns = fns_generator()
1885 1881 prepare(ctx, fns)
1886 1882 for rev in nrevs:
1887 1883 yield change(rev)
1888 1884
1889 1885 if stopiteration:
1890 1886 break
1891 1887
1892 1888 return iterate()
1893 1889
1894 1890 def _makefollowlogfilematcher(repo, files, followfirst):
1895 1891 # When displaying a revision with --patch --follow FILE, we have
1896 1892 # to know which file of the revision must be diffed. With
1897 1893 # --follow, we want the names of the ancestors of FILE in the
1898 1894 # revision, stored in "fcache". "fcache" is populated by
1899 1895 # reproducing the graph traversal already done by --follow revset
1900 1896 # and relating linkrevs to file names (which is not "correct" but
1901 1897 # good enough).
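# For illustration, after populate() runs for a renamed file, fcache maps
# linkrev -> set of historical names, e.g. {14: set(['a']), 9: set(['old_a'])}
# for a file 'a' that used to be named 'old_a' (the revision numbers here
# are made up).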
1902 1898 fcache = {}
1903 1899 fcacheready = [False]
1904 1900 pctx = repo['.']
1905 1901
1906 1902 def populate():
1907 1903 for fn in files:
1908 1904 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1909 1905 for c in i:
1910 1906 fcache.setdefault(c.linkrev(), set()).add(c.path())
1911 1907
1912 1908 def filematcher(rev):
1913 1909 if not fcacheready[0]:
1914 1910 # Lazy initialization
1915 1911 fcacheready[0] = True
1916 1912 populate()
1917 1913 return scmutil.matchfiles(repo, fcache.get(rev, []))
1918 1914
1919 1915 return filematcher
1920 1916
1921 1917 def _makenofollowlogfilematcher(repo, pats, opts):
1922 1918 '''hook for extensions to override the filematcher for non-follow cases'''
1923 1919 return None
1924 1920
1925 1921 def _makelogrevset(repo, pats, opts, revs):
1926 1922 """Return (expr, filematcher) where expr is a revset string built
1927 1923 from log options and file patterns or None. If --stat or --patch
1928 1924 are not passed filematcher is None. Otherwise it is a callable
1929 1925 taking a revision number and returning a match object filtering
1930 1926 the files to be detailed when displaying the revision.
1931 1927 """
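# Rough illustration: for opts such as {'keyword': ['bug'], 'user': ['alice']}
# and no file patterns, the expression assembled from opt2revset below is
# "((keyword('bug')) and (user('alice')))".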
1932 1928 opt2revset = {
1933 1929 'no_merges': ('not merge()', None),
1934 1930 'only_merges': ('merge()', None),
1935 1931 '_ancestors': ('ancestors(%(val)s)', None),
1936 1932 '_fancestors': ('_firstancestors(%(val)s)', None),
1937 1933 '_descendants': ('descendants(%(val)s)', None),
1938 1934 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1939 1935 '_matchfiles': ('_matchfiles(%(val)s)', None),
1940 1936 'date': ('date(%(val)r)', None),
1941 1937 'branch': ('branch(%(val)r)', ' or '),
1942 1938 '_patslog': ('filelog(%(val)r)', ' or '),
1943 1939 '_patsfollow': ('follow(%(val)r)', ' or '),
1944 1940 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1945 1941 'keyword': ('keyword(%(val)r)', ' or '),
1946 1942 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1947 1943 'user': ('user(%(val)r)', ' or '),
1948 1944 }
1949 1945
1950 1946 opts = dict(opts)
1951 1947 # follow or not follow?
1952 1948 follow = opts.get('follow') or opts.get('follow_first')
1953 1949 if opts.get('follow_first'):
1954 1950 followfirst = 1
1955 1951 else:
1956 1952 followfirst = 0
1957 1953 # --follow with FILE behavior depends on revs...
1958 1954 it = iter(revs)
1959 1955 startrev = it.next()
1960 1956 followdescendants = startrev < next(it, startrev)
1961 1957
1962 1958 # branch and only_branch are really aliases and must be handled at
1963 1959 # the same time
1964 1960 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1965 1961 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1966 1962 # pats/include/exclude are passed to match.match() directly in
1967 1963 # _matchfiles() revset but walkchangerevs() builds its matcher with
1968 1964 # scmutil.match(). The difference is that input pats are globbed on
1969 1965 # platforms without shell expansion (windows).
1970 1966 wctx = repo[None]
1971 1967 match, pats = scmutil.matchandpats(wctx, pats, opts)
1972 1968 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1973 1969 opts.get('removed'))
1974 1970 if not slowpath:
1975 1971 for f in match.files():
1976 1972 if follow and f not in wctx:
1977 1973 # If the file exists, it may be a directory, so let it
1978 1974 # take the slow path.
1979 1975 if os.path.exists(repo.wjoin(f)):
1980 1976 slowpath = True
1981 1977 continue
1982 1978 else:
1983 1979 raise util.Abort(_('cannot follow file not in parent '
1984 1980 'revision: "%s"') % f)
1985 1981 filelog = repo.file(f)
1986 1982 if not filelog:
1987 1983 # A zero count may be a directory or deleted file, so
1988 1984 # try to find matching entries on the slow path.
1989 1985 if follow:
1990 1986 raise util.Abort(
1991 1987 _('cannot follow nonexistent file: "%s"') % f)
1992 1988 slowpath = True
1993 1989
1994 1990 # We decided to fall back to the slowpath because at least one
1995 1991 # of the paths was not a file. Check to see if at least one of them
1996 1992 # existed in history - in that case, we'll continue down the
1997 1993 # slowpath; otherwise, we can turn off the slowpath
1998 1994 if slowpath:
1999 1995 for path in match.files():
2000 1996 if path == '.' or path in repo.store:
2001 1997 break
2002 1998 else:
2003 1999 slowpath = False
2004 2000
2005 2001 fpats = ('_patsfollow', '_patsfollowfirst')
2006 2002 fnopats = (('_ancestors', '_fancestors'),
2007 2003 ('_descendants', '_fdescendants'))
2008 2004 if slowpath:
2009 2005 # See walkchangerevs() slow path.
2010 2006 #
2011 2007 # pats/include/exclude cannot be represented as separate
2012 2008 # revset expressions as their filtering logic applies at file
2013 2009 # level. For instance "-I a -X a" matches a revision touching
2014 2010 # "a" and "b" while "file(a) and not file(b)" does
2015 2011 # not. Besides, filesets are evaluated against the working
2016 2012 # directory.
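# For illustration, a slow-path invocation such as "hg log 'glob:*.py' -I src"
# roughly collapses to the single revset atom
# _matchfiles('r:', 'd:relpath', 'p:glob:*.py', 'i:src').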
2017 2013 matchargs = ['r:', 'd:relpath']
2018 2014 for p in pats:
2019 2015 matchargs.append('p:' + p)
2020 2016 for p in opts.get('include', []):
2021 2017 matchargs.append('i:' + p)
2022 2018 for p in opts.get('exclude', []):
2023 2019 matchargs.append('x:' + p)
2024 2020 matchargs = ','.join(('%r' % p) for p in matchargs)
2025 2021 opts['_matchfiles'] = matchargs
2026 2022 if follow:
2027 2023 opts[fnopats[0][followfirst]] = '.'
2028 2024 else:
2029 2025 if follow:
2030 2026 if pats:
2031 2027 # follow() revset interprets its file argument as a
2032 2028 # manifest entry, so use match.files(), not pats.
2033 2029 opts[fpats[followfirst]] = list(match.files())
2034 2030 else:
2035 2031 op = fnopats[followdescendants][followfirst]
2036 2032 opts[op] = 'rev(%d)' % startrev
2037 2033 else:
2038 2034 opts['_patslog'] = list(pats)
2039 2035
2040 2036 filematcher = None
2041 2037 if opts.get('patch') or opts.get('stat'):
2042 2038 # When following files, track renames via a special matcher.
2043 2039 # If we're forced to take the slowpath it means we're following
2044 2040 # at least one pattern/directory, so don't bother with rename tracking.
2045 2041 if follow and not match.always() and not slowpath:
2046 2042 # _makefollowlogfilematcher expects its files argument to be
2047 2043 # relative to the repo root, so use match.files(), not pats.
2048 2044 filematcher = _makefollowlogfilematcher(repo, match.files(),
2049 2045 followfirst)
2050 2046 else:
2051 2047 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2052 2048 if filematcher is None:
2053 2049 filematcher = lambda rev: match
2054 2050
2055 2051 expr = []
2056 2052 for op, val in sorted(opts.iteritems()):
2057 2053 if not val:
2058 2054 continue
2059 2055 if op not in opt2revset:
2060 2056 continue
2061 2057 revop, andor = opt2revset[op]
2062 2058 if '%(val)' not in revop:
2063 2059 expr.append(revop)
2064 2060 else:
2065 2061 if not isinstance(val, list):
2066 2062 e = revop % {'val': val}
2067 2063 else:
2068 2064 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2069 2065 expr.append(e)
2070 2066
2071 2067 if expr:
2072 2068 expr = '(' + ' and '.join(expr) + ')'
2073 2069 else:
2074 2070 expr = None
2075 2071 return expr, filematcher
2076 2072
2077 2073 def _logrevs(repo, opts):
2078 2074 # Default --rev value depends on --follow but --follow behavior
2079 2075 # depends on revisions resolved from --rev...
2080 2076 follow = opts.get('follow') or opts.get('follow_first')
2081 2077 if opts.get('rev'):
2082 2078 revs = scmutil.revrange(repo, opts['rev'])
2083 2079 elif follow and repo.dirstate.p1() == nullid:
2084 2080 revs = revset.baseset()
2085 2081 elif follow:
2086 2082 revs = repo.revs('reverse(:.)')
2087 2083 else:
2088 2084 revs = revset.spanset(repo)
2089 2085 revs.reverse()
2090 2086 return revs
2091 2087
2092 2088 def getgraphlogrevs(repo, pats, opts):
2093 2089 """Return (revs, expr, filematcher) where revs is an iterable of
2094 2090 revision numbers, expr is a revset string built from log options
2095 2091 and file patterns or None, and used to filter 'revs'. If --stat or
2096 2092 --patch are not passed filematcher is None. Otherwise it is a
2097 2093 callable taking a revision number and returning a match object
2098 2094 filtering the files to be detailed when displaying the revision.
2099 2095 """
2100 2096 limit = loglimit(opts)
2101 2097 revs = _logrevs(repo, opts)
2102 2098 if not revs:
2103 2099 return revset.baseset(), None, None
2104 2100 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2105 2101 if opts.get('rev'):
2106 2102 # User-specified revs might be unsorted, but don't sort before
2107 2103 # _makelogrevset because it might depend on the order of revs
2108 2104 revs.sort(reverse=True)
2109 2105 if expr:
2110 2106 # Revset matchers often operate faster on revisions in changelog
2111 2107 # order, because most filters deal with the changelog.
2112 2108 revs.reverse()
2113 2109 matcher = revset.match(repo.ui, expr)
2114 2110 # Revset matches can reorder revisions. "A or B" typically
2115 2111 # returns the revision matching A then the revision matching B. Sort
2116 2112 # again to fix that.
2117 2113 revs = matcher(repo, revs)
2118 2114 revs.sort(reverse=True)
2119 2115 if limit is not None:
2120 2116 limitedrevs = []
2121 2117 for idx, rev in enumerate(revs):
2122 2118 if idx >= limit:
2123 2119 break
2124 2120 limitedrevs.append(rev)
2125 2121 revs = revset.baseset(limitedrevs)
2126 2122
2127 2123 return revs, expr, filematcher
2128 2124
2129 2125 def getlogrevs(repo, pats, opts):
2130 2126 """Return (revs, expr, filematcher) where revs is an iterable of
2131 2127 revision numbers, expr is a revset string built from log options
2132 2128 and file patterns or None, and used to filter 'revs'. If --stat or
2133 2129 --patch are not passed filematcher is None. Otherwise it is a
2134 2130 callable taking a revision number and returning a match object
2135 2131 filtering the files to be detailed when displaying the revision.
2136 2132 """
2137 2133 limit = loglimit(opts)
2138 2134 revs = _logrevs(repo, opts)
2139 2135 if not revs:
2140 2136 return revset.baseset([]), None, None
2141 2137 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2142 2138 if expr:
2143 2139 # Revset matchers often operate faster on revisions in changelog
2144 2140 # order, because most filters deal with the changelog.
2145 2141 if not opts.get('rev'):
2146 2142 revs.reverse()
2147 2143 matcher = revset.match(repo.ui, expr)
2148 2144 # Revset matches can reorder revisions. "A or B" typically
2149 2145 # returns the revision matching A then the revision matching B. Sort
2150 2146 # again to fix that.
2151 2147 revs = matcher(repo, revs)
2152 2148 if not opts.get('rev'):
2153 2149 revs.sort(reverse=True)
2154 2150 if limit is not None:
2155 2151 limitedrevs = []
2156 2152 for idx, r in enumerate(revs):
2157 2153 if limit <= idx:
2158 2154 break
2159 2155 limitedrevs.append(r)
2160 2156 revs = revset.baseset(limitedrevs)
2161 2157
2162 2158 return revs, expr, filematcher
2163 2159
2164 2160 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
2165 2161 filematcher=None):
2166 2162 seen, state = [], graphmod.asciistate()
2167 2163 for rev, type, ctx, parents in dag:
2168 2164 char = 'o'
2169 2165 if ctx.node() in showparents:
2170 2166 char = '@'
2171 2167 elif ctx.obsolete():
2172 2168 char = 'x'
2173 2169 elif ctx.closesbranch():
2174 2170 char = '_'
2175 2171 copies = None
2176 2172 if getrenamed and ctx.rev():
2177 2173 copies = []
2178 2174 for fn in ctx.files():
2179 2175 rename = getrenamed(fn, ctx.rev())
2180 2176 if rename:
2181 2177 copies.append((fn, rename[0]))
2182 2178 revmatchfn = None
2183 2179 if filematcher is not None:
2184 2180 revmatchfn = filematcher(ctx.rev())
2185 2181 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2186 2182 lines = displayer.hunk.pop(rev).split('\n')
2187 2183 if not lines[-1]:
2188 2184 del lines[-1]
2189 2185 displayer.flush(ctx)
2190 2186 edges = edgefn(type, char, lines, seen, rev, parents)
2191 2187 for type, char, lines, coldata in edges:
2192 2188 graphmod.ascii(ui, state, type, char, lines, coldata)
2193 2189 displayer.close()
2194 2190
2195 2191 def graphlog(ui, repo, *pats, **opts):
2196 2192 # Parameters are identical to log command ones
2197 2193 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2198 2194 revdag = graphmod.dagwalker(repo, revs)
2199 2195
2200 2196 getrenamed = None
2201 2197 if opts.get('copies'):
2202 2198 endrev = None
2203 2199 if opts.get('rev'):
2204 2200 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2205 2201 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2206 2202 displayer = show_changeset(ui, repo, opts, buffered=True)
2207 2203 showparents = [ctx.node() for ctx in repo[None].parents()]
2208 2204 displaygraph(ui, revdag, displayer, showparents,
2209 2205 graphmod.asciiedges, getrenamed, filematcher)
2210 2206
2211 2207 def checkunsupportedgraphflags(pats, opts):
2212 2208 for op in ["newest_first"]:
2213 2209 if op in opts and opts[op]:
2214 2210 raise util.Abort(_("-G/--graph option is incompatible with --%s")
2215 2211 % op.replace("_", "-"))
2216 2212
2217 2213 def graphrevs(repo, nodes, opts):
2218 2214 limit = loglimit(opts)
2219 2215 nodes.reverse()
2220 2216 if limit is not None:
2221 2217 nodes = nodes[:limit]
2222 2218 return graphmod.nodes(repo, nodes)
2223 2219
2224 2220 def add(ui, repo, match, prefix, explicitonly, **opts):
2225 2221 join = lambda f: os.path.join(prefix, f)
2226 2222 bad = []
2227 2223
2228 2224 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2229 2225 names = []
2230 2226 wctx = repo[None]
2231 2227 cca = None
2232 2228 abort, warn = scmutil.checkportabilityalert(ui)
2233 2229 if abort or warn:
2234 2230 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2235 2231
2236 2232 badmatch = matchmod.badmatch(match, badfn)
2237 2233 dirstate = repo.dirstate
2238 2234 # We don't want to just call wctx.walk here, since it would return a lot of
2239 2235 # clean files, which we aren't interested in and which takes time.
2240 2236 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2241 2237 True, False, full=False)):
2242 2238 exact = match.exact(f)
2243 2239 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2244 2240 if cca:
2245 2241 cca(f)
2246 2242 names.append(f)
2247 2243 if ui.verbose or not exact:
2248 2244 ui.status(_('adding %s\n') % match.rel(f))
2249 2245
2250 2246 for subpath in sorted(wctx.substate):
2251 2247 sub = wctx.sub(subpath)
2252 2248 try:
2253 2249 submatch = matchmod.narrowmatcher(subpath, match)
2254 2250 if opts.get('subrepos'):
2255 2251 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2256 2252 else:
2257 2253 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2258 2254 except error.LookupError:
2259 2255 ui.status(_("skipping missing subrepository: %s\n")
2260 2256 % join(subpath))
2261 2257
2262 2258 if not opts.get('dry_run'):
2263 2259 rejected = wctx.add(names, prefix)
2264 2260 bad.extend(f for f in rejected if f in match.files())
2265 2261 return bad
2266 2262
2267 2263 def forget(ui, repo, match, prefix, explicitonly):
2268 2264 join = lambda f: os.path.join(prefix, f)
2269 2265 bad = []
2270 2266 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2271 2267 wctx = repo[None]
2272 2268 forgot = []
2273 2269
2274 2270 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2275 2271 forget = sorted(s[0] + s[1] + s[3] + s[6])
2276 2272 if explicitonly:
2277 2273 forget = [f for f in forget if match.exact(f)]
2278 2274
2279 2275 for subpath in sorted(wctx.substate):
2280 2276 sub = wctx.sub(subpath)
2281 2277 try:
2282 2278 submatch = matchmod.narrowmatcher(subpath, match)
2283 2279 subbad, subforgot = sub.forget(submatch, prefix)
2284 2280 bad.extend([subpath + '/' + f for f in subbad])
2285 2281 forgot.extend([subpath + '/' + f for f in subforgot])
2286 2282 except error.LookupError:
2287 2283 ui.status(_("skipping missing subrepository: %s\n")
2288 2284 % join(subpath))
2289 2285
2290 2286 if not explicitonly:
2291 2287 for f in match.files():
2292 2288 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2293 2289 if f not in forgot:
2294 2290 if repo.wvfs.exists(f):
2295 2291 # Don't complain if the exact case match wasn't given.
2296 2292 # But don't do this until after checking 'forgot', so
2297 2293 # that subrepo files aren't normalized, and this op is
2298 2294 # purely from data cached by the status walk above.
2299 2295 if repo.dirstate.normalize(f) in repo.dirstate:
2300 2296 continue
2301 2297 ui.warn(_('not removing %s: '
2302 2298 'file is already untracked\n')
2303 2299 % match.rel(f))
2304 2300 bad.append(f)
2305 2301
2306 2302 for f in forget:
2307 2303 if ui.verbose or not match.exact(f):
2308 2304 ui.status(_('removing %s\n') % match.rel(f))
2309 2305
2310 2306 rejected = wctx.forget(forget, prefix)
2311 2307 bad.extend(f for f in rejected if f in match.files())
2312 2308 forgot.extend(f for f in forget if f not in rejected)
2313 2309 return bad, forgot
2314 2310
2315 2311 def files(ui, ctx, m, fm, fmt, subrepos):
2316 2312 rev = ctx.rev()
2317 2313 ret = 1
2318 2314 ds = ctx.repo().dirstate
2319 2315
2320 2316 for f in ctx.matches(m):
2321 2317 if rev is None and ds[f] == 'r':
2322 2318 continue
2323 2319 fm.startitem()
2324 2320 if ui.verbose:
2325 2321 fc = ctx[f]
2326 2322 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2327 2323 fm.data(abspath=f)
2328 2324 fm.write('path', fmt, m.rel(f))
2329 2325 ret = 0
2330 2326
2331 2327 for subpath in sorted(ctx.substate):
2332 2328 def matchessubrepo(subpath):
2333 2329 return (m.always() or m.exact(subpath)
2334 2330 or any(f.startswith(subpath + '/') for f in m.files()))
2335 2331
2336 2332 if subrepos or matchessubrepo(subpath):
2337 2333 sub = ctx.sub(subpath)
2338 2334 try:
2339 2335 submatch = matchmod.narrowmatcher(subpath, m)
2340 2336 if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
2341 2337 ret = 0
2342 2338 except error.LookupError:
2343 2339 ui.status(_("skipping missing subrepository: %s\n")
2344 2340 % m.abs(subpath))
2345 2341
2346 2342 return ret
2347 2343
2348 2344 def remove(ui, repo, m, prefix, after, force, subrepos):
2349 2345 join = lambda f: os.path.join(prefix, f)
2350 2346 ret = 0
2351 2347 s = repo.status(match=m, clean=True)
2352 2348 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2353 2349
2354 2350 wctx = repo[None]
2355 2351
2356 2352 for subpath in sorted(wctx.substate):
2357 2353 def matchessubrepo(matcher, subpath):
2358 2354 if matcher.exact(subpath):
2359 2355 return True
2360 2356 for f in matcher.files():
2361 2357 if f.startswith(subpath):
2362 2358 return True
2363 2359 return False
2364 2360
2365 2361 if subrepos or matchessubrepo(m, subpath):
2366 2362 sub = wctx.sub(subpath)
2367 2363 try:
2368 2364 submatch = matchmod.narrowmatcher(subpath, m)
2369 2365 if sub.removefiles(submatch, prefix, after, force, subrepos):
2370 2366 ret = 1
2371 2367 except error.LookupError:
2372 2368 ui.status(_("skipping missing subrepository: %s\n")
2373 2369 % join(subpath))
2374 2370
2375 2371 # warn about failure to delete explicit files/dirs
2376 2372 deleteddirs = util.dirs(deleted)
2377 2373 for f in m.files():
2378 2374 def insubrepo():
2379 2375 for subpath in wctx.substate:
2380 2376 if f.startswith(subpath):
2381 2377 return True
2382 2378 return False
2383 2379
2384 2380 isdir = f in deleteddirs or wctx.hasdir(f)
2385 2381 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2386 2382 continue
2387 2383
2388 2384 if repo.wvfs.exists(f):
2389 2385 if repo.wvfs.isdir(f):
2390 2386 ui.warn(_('not removing %s: no tracked files\n')
2391 2387 % m.rel(f))
2392 2388 else:
2393 2389 ui.warn(_('not removing %s: file is untracked\n')
2394 2390 % m.rel(f))
2395 2391 # missing files will generate a warning elsewhere
2396 2392 ret = 1
2397 2393
2398 2394 if force:
2399 2395 list = modified + deleted + clean + added
2400 2396 elif after:
2401 2397 list = deleted
2402 2398 for f in modified + added + clean:
2403 2399 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2404 2400 ret = 1
2405 2401 else:
2406 2402 list = deleted + clean
2407 2403 for f in modified:
2408 2404 ui.warn(_('not removing %s: file is modified (use -f'
2409 2405 ' to force removal)\n') % m.rel(f))
2410 2406 ret = 1
2411 2407 for f in added:
2412 2408 ui.warn(_('not removing %s: file has been marked for add'
2413 2409 ' (use forget to undo)\n') % m.rel(f))
2414 2410 ret = 1
2415 2411
2416 2412 for f in sorted(list):
2417 2413 if ui.verbose or not m.exact(f):
2418 2414 ui.status(_('removing %s\n') % m.rel(f))
2419 2415
2420 2416 wlock = repo.wlock()
2421 2417 try:
2422 2418 if not after:
2423 2419 for f in list:
2424 2420 if f in added:
2425 2421 continue # we never unlink added files on remove
2426 2422 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2427 2423 repo[None].forget(list)
2428 2424 finally:
2429 2425 wlock.release()
2430 2426
2431 2427 return ret
2432 2428
2433 2429 def cat(ui, repo, ctx, matcher, prefix, **opts):
2434 2430 err = 1
2435 2431
2436 2432 def write(path):
2437 2433 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2438 2434 pathname=os.path.join(prefix, path))
2439 2435 data = ctx[path].data()
2440 2436 if opts.get('decode'):
2441 2437 data = repo.wwritedata(path, data)
2442 2438 fp.write(data)
2443 2439 fp.close()
2444 2440
2445 2441 # Automation often uses hg cat on single files, so special case it
2446 2442 # for performance to avoid the cost of parsing the manifest.
2447 2443 if len(matcher.files()) == 1 and not matcher.anypats():
2448 2444 file = matcher.files()[0]
2449 2445 mf = repo.manifest
2450 2446 mfnode = ctx.manifestnode()
2451 2447 if mfnode and mf.find(mfnode, file)[0]:
2452 2448 write(file)
2453 2449 return 0
2454 2450
2455 2451 # Don't warn about "missing" files that are really in subrepos
2456 2452 def badfn(path, msg):
2457 2453 for subpath in ctx.substate:
2458 2454 if path.startswith(subpath):
2459 2455 return
2460 2456 matcher.bad(path, msg)
2461 2457
2462 2458 for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
2463 2459 write(abs)
2464 2460 err = 0
2465 2461
2466 2462 for subpath in sorted(ctx.substate):
2467 2463 sub = ctx.sub(subpath)
2468 2464 try:
2469 2465 submatch = matchmod.narrowmatcher(subpath, matcher)
2470 2466
2471 2467 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2472 2468 **opts):
2473 2469 err = 0
2474 2470 except error.RepoLookupError:
2475 2471 ui.status(_("skipping missing subrepository: %s\n")
2476 2472 % os.path.join(prefix, subpath))
2477 2473
2478 2474 return err
2479 2475
2480 2476 def commit(ui, repo, commitfunc, pats, opts):
2481 2477 '''commit the specified files or all outstanding changes'''
2482 2478 date = opts.get('date')
2483 2479 if date:
2484 2480 opts['date'] = util.parsedate(date)
2485 2481 message = logmessage(ui, opts)
2486 2482 matcher = scmutil.match(repo[None], pats, opts)
2487 2483
2488 2484 # extract addremove carefully -- this function can be called from a command
2489 2485 # that doesn't support addremove
2490 2486 if opts.get('addremove'):
2491 2487 if scmutil.addremove(repo, matcher, "", opts) != 0:
2492 2488 raise util.Abort(
2493 2489 _("failed to mark all new/missing files as added/removed"))
2494 2490
2495 2491 return commitfunc(ui, repo, message, matcher, opts)
2496 2492
2497 2493 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2498 2494 # avoid cycle context -> subrepo -> cmdutil
2499 2495 import context
2500 2496
2501 2497 # amend will reuse the existing user if not specified, but the obsolete
2502 2498 # marker creation requires that the current user's name is specified.
2503 2499 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2504 2500 ui.username() # raise exception if username not set
2505 2501
2506 2502 ui.note(_('amending changeset %s\n') % old)
2507 2503 base = old.p1()
2508 2504 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2509 2505
2510 2506 wlock = lock = newid = None
2511 2507 try:
2512 2508 wlock = repo.wlock()
2513 2509 lock = repo.lock()
2514 2510 tr = repo.transaction('amend')
2515 2511 try:
2516 2512 # See if we got a message from -m or -l; if not, open the editor
2517 2513 # with the message of the changeset to amend
2518 2514 message = logmessage(ui, opts)
2519 2515 # ensure logfile does not conflict with later enforcement of the
2520 2516 # message. potential logfile content has been processed by
2521 2517 # `logmessage` anyway.
2522 2518 opts.pop('logfile')
2523 2519 # First, do a regular commit to record all changes in the working
2524 2520 # directory (if there are any)
2525 2521 ui.callhooks = False
2526 2522 activebookmark = repo._activebookmark
2527 2523 try:
2528 2524 repo._activebookmark = None
2529 2525 opts['message'] = 'temporary amend commit for %s' % old
2530 2526 node = commit(ui, repo, commitfunc, pats, opts)
2531 2527 finally:
2532 2528 repo._activebookmark = activebookmark
2533 2529 ui.callhooks = True
2534 2530 ctx = repo[node]
2535 2531
2536 2532 # Participating changesets:
2537 2533 #
2538 2534 # node/ctx o - new (intermediate) commit that contains changes
2539 2535 # | from working dir to go into amending commit
2540 2536 # | (or a workingctx if there were no changes)
2541 2537 # |
2542 2538 # old o - changeset to amend
2543 2539 # |
2544 2540 # base o - parent of amending changeset
2545 2541
2546 2542 # Update extra dict from amended commit (e.g. to preserve graft
2547 2543 # source)
2548 2544 extra.update(old.extra())
2549 2545
2550 2546 # Also update it from the intermediate commit or from the wctx
2551 2547 extra.update(ctx.extra())
2552 2548
2553 2549 if len(old.parents()) > 1:
2554 2550 # ctx.files() isn't reliable for merges, so fall back to the
2555 2551 # slower repo.status() method
2556 2552 files = set([fn for st in repo.status(base, old)[:3]
2557 2553 for fn in st])
2558 2554 else:
2559 2555 files = set(old.files())
2560 2556
2561 2557 # Second, we use either the commit we just did, or, if there were no
2562 2558 # changes, the parent of the working directory as the version of the
2563 2559 # files in the final amend commit
2564 2560 if node:
2565 2561 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2566 2562
2567 2563 user = ctx.user()
2568 2564 date = ctx.date()
2569 2565 # Recompute copies (avoid recording a -> b -> a)
2570 2566 copied = copies.pathcopies(base, ctx)
2571 2567 if old.p2:
2572 2568 copied.update(copies.pathcopies(old.p2(), ctx))
2573 2569
2574 2570 # Prune files which were reverted by the updates: if old
2575 2571 # introduced file X and our intermediate commit, node,
2576 2572 # renamed that file, then those two files are the same and
2577 2573 # we can discard X from our list of files. Likewise if X
2578 2574 # was deleted, it's no longer relevant
2579 2575 files.update(ctx.files())
2580 2576
2581 2577 def samefile(f):
2582 2578 if f in ctx.manifest():
2583 2579 a = ctx.filectx(f)
2584 2580 if f in base.manifest():
2585 2581 b = base.filectx(f)
2586 2582 return (not a.cmp(b)
2587 2583 and a.flags() == b.flags())
2588 2584 else:
2589 2585 return False
2590 2586 else:
2591 2587 return f not in base.manifest()
2592 2588 files = [f for f in files if not samefile(f)]
2593 2589
2594 2590 def filectxfn(repo, ctx_, path):
2595 2591 try:
2596 2592 fctx = ctx[path]
2597 2593 flags = fctx.flags()
2598 2594 mctx = context.memfilectx(repo,
2599 2595 fctx.path(), fctx.data(),
2600 2596 islink='l' in flags,
2601 2597 isexec='x' in flags,
2602 2598 copied=copied.get(path))
2603 2599 return mctx
2604 2600 except KeyError:
2605 2601 return None
2606 2602 else:
2607 2603 ui.note(_('copying changeset %s to %s\n') % (old, base))
2608 2604
2609 2605 # Use version of files as in the old cset
2610 2606 def filectxfn(repo, ctx_, path):
2611 2607 try:
2612 2608 return old.filectx(path)
2613 2609 except KeyError:
2614 2610 return None
2615 2611
2616 2612 user = opts.get('user') or old.user()
2617 2613 date = opts.get('date') or old.date()
2618 2614 editform = mergeeditform(old, 'commit.amend')
2619 2615 editor = getcommiteditor(editform=editform, **opts)
2620 2616 if not message:
2621 2617 editor = getcommiteditor(edit=True, editform=editform)
2622 2618 message = old.description()
2623 2619
2624 2620 pureextra = extra.copy()
2625 2621 extra['amend_source'] = old.hex()
2626 2622
2627 2623 new = context.memctx(repo,
2628 2624 parents=[base.node(), old.p2().node()],
2629 2625 text=message,
2630 2626 files=files,
2631 2627 filectxfn=filectxfn,
2632 2628 user=user,
2633 2629 date=date,
2634 2630 extra=extra,
2635 2631 editor=editor)
2636 2632
2637 2633 newdesc = changelog.stripdesc(new.description())
2638 2634 if ((not node)
2639 2635 and newdesc == old.description()
2640 2636 and user == old.user()
2641 2637 and date == old.date()
2642 2638 and pureextra == old.extra()):
2643 2639 # nothing changed. continuing here would create a new node
2644 2640 # anyway because of the amend_source noise.
2645 2641 #
2646 2642 # This is not what we expect from amend.
2647 2643 return old.node()
2648 2644
2649 2645 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2650 2646 try:
2651 2647 if opts.get('secret'):
2652 2648 commitphase = 'secret'
2653 2649 else:
2654 2650 commitphase = old.phase()
2655 2651 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2656 2652 newid = repo.commitctx(new)
2657 2653 finally:
2658 2654 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2659 2655 if newid != old.node():
2660 2656 # Reroute the working copy parent to the new changeset
2661 2657 repo.setparents(newid, nullid)
2662 2658
2663 2659 # Move bookmarks from old parent to amend commit
2664 2660 bms = repo.nodebookmarks(old.node())
2665 2661 if bms:
2666 2662 marks = repo._bookmarks
2667 2663 for bm in bms:
2668 2664 ui.debug('moving bookmarks %r from %s to %s\n' %
2669 2665 (marks, old.hex(), hex(newid)))
2670 2666 marks[bm] = newid
2671 2667 marks.recordchange(tr)
2672 2668 # commit the whole amend process
2673 2669 if createmarkers:
2674 2670 # mark the new changeset as successor of the rewritten one
2675 2671 new = repo[newid]
2676 2672 obs = [(old, (new,))]
2677 2673 if node:
2678 2674 obs.append((ctx, ()))
2679 2675
2680 2676 obsolete.createmarkers(repo, obs)
2681 2677 tr.close()
2682 2678 finally:
2683 2679 tr.release()
2684 2680 if not createmarkers and newid != old.node():
2685 2681 # Strip the intermediate commit (if there was one) and the amended
2686 2682 # commit
2687 2683 if node:
2688 2684 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2689 2685 ui.note(_('stripping amended changeset %s\n') % old)
2690 2686 repair.strip(ui, repo, old.node(), topic='amend-backup')
2691 2687 finally:
2692 2688 lockmod.release(lock, wlock)
2693 2689 return newid
2694 2690
2695 2691 def commiteditor(repo, ctx, subs, editform=''):
2696 2692 if ctx.description():
2697 2693 return ctx.description()
2698 2694 return commitforceeditor(repo, ctx, subs, editform=editform)
2699 2695
2700 2696 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2701 2697 editform=''):
2702 2698 if not extramsg:
2703 2699 extramsg = _("Leave message empty to abort commit.")
2704 2700
2705 2701 forms = [e for e in editform.split('.') if e]
2706 2702 forms.insert(0, 'changeset')
2707 2703 while forms:
2708 2704 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2709 2705 if tmpl:
2710 2706 committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
2711 2707 break
2712 2708 forms.pop()
2713 2709 else:
2714 2710 committext = buildcommittext(repo, ctx, subs, extramsg)
2715 2711
2716 2712 # run editor in the repository root
2717 2713 olddir = os.getcwd()
2718 2714 os.chdir(repo.root)
2719 2715 text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
2720 2716 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2721 2717 os.chdir(olddir)
2722 2718
2723 2719 if finishdesc:
2724 2720 text = finishdesc(text)
2725 2721 if not text.strip():
2726 2722 raise util.Abort(_("empty commit message"))
2727 2723
2728 2724 return text
2729 2725
2730 2726 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2731 2727 ui = repo.ui
2732 2728 tmpl, mapfile = gettemplate(ui, tmpl, None)
2733 2729
2734 2730 try:
2735 2731 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2736 2732 except SyntaxError as inst:
2737 2733 raise util.Abort(inst.args[0])
2738 2734
2739 2735 for k, v in repo.ui.configitems('committemplate'):
2740 2736 if k != 'changeset':
2741 2737 t.t.cache[k] = v
2742 2738
2743 2739 if not extramsg:
2744 2740 extramsg = '' # ensure that extramsg is a string
2745 2741
2746 2742 ui.pushbuffer()
2747 2743 t.show(ctx, extramsg=extramsg)
2748 2744 return ui.popbuffer()
2749 2745
2750 2746 def hgprefix(msg):
2751 2747 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2752 2748
2753 2749 def buildcommittext(repo, ctx, subs, extramsg):
2754 2750 edittext = []
2755 2751 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2756 2752 if ctx.description():
2757 2753 edittext.append(ctx.description())
2758 2754 edittext.append("")
2759 2755 edittext.append("") # Empty line between message and comments.
2760 2756 edittext.append(hgprefix(_("Enter commit message."
2761 2757 " Lines beginning with 'HG:' are removed.")))
2762 2758 edittext.append(hgprefix(extramsg))
2763 2759 edittext.append("HG: --")
2764 2760 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2765 2761 if ctx.p2():
2766 2762 edittext.append(hgprefix(_("branch merge")))
2767 2763 if ctx.branch():
2768 2764 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2769 2765 if bookmarks.isactivewdirparent(repo):
2770 2766 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2771 2767 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2772 2768 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2773 2769 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2774 2770 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2775 2771 if not added and not modified and not removed:
2776 2772 edittext.append(hgprefix(_("no files changed")))
2777 2773 edittext.append("")
2778 2774
2779 2775 return "\n".join(edittext)
2780 2776
2781 2777 def commitstatus(repo, node, branch, bheads=None, opts=None):
2782 2778 if opts is None:
2783 2779 opts = {}
2784 2780 ctx = repo[node]
2785 2781 parents = ctx.parents()
2786 2782
2787 2783 if (not opts.get('amend') and bheads and node not in bheads and not
2788 2784 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2789 2785 repo.ui.status(_('created new head\n'))
2790 2786 # The message is not printed for initial roots. For the other
2791 2787 # changesets, it is printed in the following situations:
2792 2788 #
2793 2789 # Par column: for the 2 parents with ...
2794 2790 # N: null or no parent
2795 2791 # B: parent is on another named branch
2796 2792 # C: parent is a regular non head changeset
2797 2793 # H: parent was a branch head of the current branch
2798 2794 # Msg column: whether we print "created new head" message
2799 2795 # In the following, it is assumed that there already exists some
2800 2796 # initial branch heads of the current branch, otherwise nothing is
2801 2797 # printed anyway.
2802 2798 #
2803 2799 # Par Msg Comment
2804 2800 # N N y additional topo root
2805 2801 #
2806 2802 # B N y additional branch root
2807 2803 # C N y additional topo head
2808 2804 # H N n usual case
2809 2805 #
2810 2806 # B B y weird additional branch root
2811 2807 # C B y branch merge
2812 2808 # H B n merge with named branch
2813 2809 #
2814 2810 # C C y additional head from merge
2815 2811 # C H n merge with a head
2816 2812 #
2817 2813 # H H n head merge: head count decreases
2818 2814
2819 2815 if not opts.get('close_branch'):
2820 2816 for r in parents:
2821 2817 if r.closesbranch() and r.branch() == branch:
2822 2818 repo.ui.status(_('reopening closed branch head %d\n') % r)
2823 2819
2824 2820 if repo.ui.debugflag:
2825 2821 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2826 2822 elif repo.ui.verbose:
2827 2823 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2828 2824
2829 2825 def revert(ui, repo, ctx, parents, *pats, **opts):
2830 2826 parent, p2 = parents
2831 2827 node = ctx.node()
2832 2828
2833 2829 mf = ctx.manifest()
2834 2830 if node == p2:
2835 2831 parent = p2
2836 2832 if node == parent:
2837 2833 pmf = mf
2838 2834 else:
2839 2835 pmf = None
2840 2836
2841 2837 # need all matching names in dirstate and manifest of target rev,
2842 2838 # so have to walk both. do not print errors if files exist in one
2843 2839 # but not the other. in both cases, filesets should be evaluated against
2844 2840 # workingctx to get consistent result (issue4497). this means 'set:**'
2845 2841 # cannot be used to select missing files from target rev.
2846 2842
2847 2843 # `names` is a mapping for all elements in working copy and target revision
2848 2844 # The mapping is in the form:
2849 2845 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2850 2846 names = {}
2851 2847
2852 2848 wlock = repo.wlock()
2853 2849 try:
2854 2850 ## filling of the `names` mapping
2855 2851 # walk dirstate to fill `names`
2856 2852
2857 2853 interactive = opts.get('interactive', False)
2858 2854 wctx = repo[None]
2859 2855 m = scmutil.match(wctx, pats, opts)
2860 2856
2861 2857 # we'll need this later
2862 2858 targetsubs = sorted(s for s in wctx.substate if m(s))
2863 2859
2864 2860 if not m.always():
2865 2861 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2866 2862 names[abs] = m.rel(abs), m.exact(abs)
2867 2863
2868 2864 # walk target manifest to fill `names`
2869 2865
2870 2866 def badfn(path, msg):
2871 2867 if path in names:
2872 2868 return
2873 2869 if path in ctx.substate:
2874 2870 return
2875 2871 path_ = path + '/'
2876 2872 for f in names:
2877 2873 if f.startswith(path_):
2878 2874 return
2879 2875 ui.warn("%s: %s\n" % (m.rel(path), msg))
2880 2876
2881 2877 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2882 2878 if abs not in names:
2883 2879 names[abs] = m.rel(abs), m.exact(abs)
2884 2880
2885 2881 # Find the status of all files in `names`.
2886 2882 m = scmutil.matchfiles(repo, names)
2887 2883
2888 2884 changes = repo.status(node1=node, match=m,
2889 2885 unknown=True, ignored=True, clean=True)
2890 2886 else:
2891 2887 changes = repo.status(node1=node, match=m)
2892 2888 for kind in changes:
2893 2889 for abs in kind:
2894 2890 names[abs] = m.rel(abs), m.exact(abs)
2895 2891
2896 2892 m = scmutil.matchfiles(repo, names)
2897 2893
2898 2894 modified = set(changes.modified)
2899 2895 added = set(changes.added)
2900 2896 removed = set(changes.removed)
2901 2897 _deleted = set(changes.deleted)
2902 2898 unknown = set(changes.unknown)
2903 2899 unknown.update(changes.ignored)
2904 2900 clean = set(changes.clean)
2905 2901 modadded = set()
2906 2902
2907 2903 # split between files known in target manifest and the others
2908 2904 smf = set(mf)
2909 2905
2910 2906 # determine the exact nature of the deleted files
2911 2907 deladded = _deleted - smf
2912 2908 deleted = _deleted - deladded
2913 2909
2914 2910 # We need to account for the state of the file in the dirstate,
2915 2911 # even when we revert against something other than the parent. This will
2916 2912 # slightly alter the behavior of revert (doing a backup or not, delete
2917 2913 # or just forget etc).
2918 2914 if parent == node:
2919 2915 dsmodified = modified
2920 2916 dsadded = added
2921 2917 dsremoved = removed
2922 2918 # store all local modifications, useful later for rename detection
2923 2919 localchanges = dsmodified | dsadded
2924 2920 modified, added, removed = set(), set(), set()
2925 2921 else:
2926 2922 changes = repo.status(node1=parent, match=m)
2927 2923 dsmodified = set(changes.modified)
2928 2924 dsadded = set(changes.added)
2929 2925 dsremoved = set(changes.removed)
2930 2926 # store all local modifications, useful later for rename detection
2931 2927 localchanges = dsmodified | dsadded
2932 2928
2933 2929 # only take into account removes between wc and target
2934 2930 clean |= dsremoved - removed
2935 2931 dsremoved &= removed
2936 2932 # distinguish between dirstate removes and the others
2937 2933 removed -= dsremoved
2938 2934
2939 2935 modadded = added & dsmodified
2940 2936 added -= modadded
2941 2937
2942 2938 # tell newly modified files apart.
2943 2939 dsmodified &= modified
2944 2940 dsmodified |= modified & dsadded # dirstate added may need backup
2945 2941 modified -= dsmodified
2946 2942
2947 2943 # We need to wait for some post-processing to update this set
2948 2944 # before making the distinction. The dirstate will be used for
2949 2945 # that purpose.
2950 2946 dsadded = added
2951 2947
2952 2948 # in case of merge, files that are actually added can be reported as
2953 2949 # modified; we need to post-process the result
2954 2950 if p2 != nullid:
2955 2951 if pmf is None:
2956 2952 # only need parent manifest in the merge case,
2957 2953 # so do not read by default
2958 2954 pmf = repo[parent].manifest()
2959 2955 mergeadd = dsmodified - set(pmf)
2960 2956 dsadded |= mergeadd
2961 2957 dsmodified -= mergeadd
2962 2958
2963 2959 # if f is a rename, update `names` to also revert the source
2964 2960 cwd = repo.getcwd()
2965 2961 for f in localchanges:
2966 2962 src = repo.dirstate.copied(f)
2967 2963 # XXX should we check for rename down to target node?
2968 2964 if src and src not in names and repo.dirstate[src] == 'r':
2969 2965 dsremoved.add(src)
2970 2966 names[src] = (repo.pathto(src, cwd), True)
2971 2967
2972 2968 # distinguish between files to forget and the others
2973 2969 added = set()
2974 2970 for abs in dsadded:
2975 2971 if repo.dirstate[abs] != 'a':
2976 2972 added.add(abs)
2977 2973 dsadded -= added
2978 2974
2979 2975 for abs in deladded:
2980 2976 if repo.dirstate[abs] == 'a':
2981 2977 dsadded.add(abs)
2982 2978 deladded -= dsadded
2983 2979
2984 2980 # For files marked as removed, we check if an unknown file is present at
2985 2981 # the same path. If such a file exists it may need to be backed up.
2986 2982 # Making the distinction at this stage helps simplify the backup
2987 2983 # logic.
2988 2984 removunk = set()
2989 2985 for abs in removed:
2990 2986 target = repo.wjoin(abs)
2991 2987 if os.path.lexists(target):
2992 2988 removunk.add(abs)
2993 2989 removed -= removunk
2994 2990
2995 2991 dsremovunk = set()
2996 2992 for abs in dsremoved:
2997 2993 target = repo.wjoin(abs)
2998 2994 if os.path.lexists(target):
2999 2995 dsremovunk.add(abs)
3000 2996 dsremoved -= dsremovunk
3001 2997
3002 2998 # actions to be actually performed by revert
3003 2999 # (<list of files>, <message>) tuple
3004 3000 actions = {'revert': ([], _('reverting %s\n')),
3005 3001 'add': ([], _('adding %s\n')),
3006 3002 'remove': ([], _('removing %s\n')),
3007 3003 'drop': ([], _('removing %s\n')),
3008 3004 'forget': ([], _('forgetting %s\n')),
3009 3005 'undelete': ([], _('undeleting %s\n')),
3010 3006 'noop': (None, _('no changes needed to %s\n')),
3011 3007 'unknown': (None, _('file not managed: %s\n')),
3012 3008 }
3013 3009
3014 3010 # "constants" that convey the backup strategy.
3015 3011 # All set to `discard` if `no-backup` is set, to avoid checking
3016 3012 # no_backup lower in the code.
3017 3013 # These values are ordered for comparison purposes
3018 3014 backup = 2 # unconditionally do backup
3019 3015 check = 1 # check if the existing file differs from target
3020 3016 discard = 0 # never do backup
3021 3017 if opts.get('no_backup'):
3022 3018 backup = check = discard
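# For illustration of how these interact with the dispatch table below:
# files in dsmodified get an unconditional backup (backup), files in
# removunk are only backed up when the on-disk file differs from the
# target (check), and --no-backup downgrades everything to discard.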
3023 3019
3024 3020 backupanddel = actions['remove']
3025 3021 if not opts.get('no_backup'):
3026 3022 backupanddel = actions['drop']
3027 3023
3028 3024 disptable = (
3029 3025 # dispatch table:
3030 3026 # file state
3031 3027 # action
3032 3028 # make backup
3033 3029
3034 3030 ## Sets that will result in file changes on disk
3035 3031 # Modified compared to target, no local change
3036 3032 (modified, actions['revert'], discard),
3037 3033 # Modified compared to target, but local file is deleted
3038 3034 (deleted, actions['revert'], discard),
3039 3035 # Modified compared to target, local change
3040 3036 (dsmodified, actions['revert'], backup),
3041 3037 # Added since target
3042 3038 (added, actions['remove'], discard),
3043 3039 # Added in working directory
3044 3040 (dsadded, actions['forget'], discard),
3045 3041 # Added since target, with local modifications
3046 3042 (modadded, backupanddel, backup),
3047 3043 # Added since target but file is missing in working directory
3048 3044 (deladded, actions['drop'], discard),
3049 3045 # Removed since target, before working copy parent
3050 3046 (removed, actions['add'], discard),
3051 3047 # Same as `removed` but an unknown file exists at the same path
3052 3048 (removunk, actions['add'], check),
3053 3049 # Removed since target, marked as such in working copy parent
3054 3050 (dsremoved, actions['undelete'], discard),
3055 3051 # Same as `dsremoved` but an unknown file exists at the same path
3056 3052 (dsremovunk, actions['undelete'], check),
3057 3053 ## the following sets do not result in any file changes
3058 3054 # File with no modification
3059 3055 (clean, actions['noop'], discard),
3060 3056 # Existing file, not tracked anywhere
3061 3057 (unknown, actions['unknown'], discard),
3062 3058 )
3063 3059
3064 3060 for abs, (rel, exact) in sorted(names.items()):
3065 3061 # target file to be touched on disk (relative to cwd)
3066 3062 target = repo.wjoin(abs)
3067 3063 # search for the entry in the dispatch table.
3068 3064 # if the file is in any of these sets, it was touched in the working
3069 3065 # directory parent and we are sure it needs to be reverted.
3070 3066 for table, (xlist, msg), dobackup in disptable:
3071 3067 if abs not in table:
3072 3068 continue
3073 3069 if xlist is not None:
3074 3070 xlist.append(abs)
3075 3071 if dobackup and (backup <= dobackup
3076 3072 or wctx[abs].cmp(ctx[abs])):
3077 3073 bakname = "%s.orig" % rel
3078 3074 ui.note(_('saving current version of %s as %s\n') %
3079 3075 (rel, bakname))
3080 3076 if not opts.get('dry_run'):
3081 3077 if interactive:
3082 3078 util.copyfile(target, bakname)
3083 3079 else:
3084 3080 util.rename(target, bakname)
3085 3081 if ui.verbose or not exact:
3086 3082 if not isinstance(msg, basestring):
3087 3083 msg = msg(abs)
3088 3084 ui.status(msg % rel)
3089 3085 elif exact:
3090 3086 ui.warn(msg % rel)
3091 3087 break
3092 3088
3093 3089 if not opts.get('dry_run'):
3094 3090 needdata = ('revert', 'add', 'undelete')
3095 3091 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3096 3092 _performrevert(repo, parents, ctx, actions, interactive)
3097 3093
3098 3094 if targetsubs:
3099 3095 # Revert the subrepos on the revert list
3100 3096 for sub in targetsubs:
3101 3097 try:
3102 3098 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3103 3099 except KeyError:
3104 3100 raise util.Abort("subrepository '%s' does not exist in %s!"
3105 3101 % (sub, short(ctx.node())))
3106 3102 finally:
3107 3103 wlock.release()
3108 3104
3109 3105 def _revertprefetch(repo, ctx, *files):
3110 3106 """Let extensions that change the storage layer prefetch content"""
3111 3107 pass
3112 3108
3113 3109 def _performrevert(repo, parents, ctx, actions, interactive=False):
3114 3110 """function that actually performs all the actions computed for revert
3115 3111
3116 3112 This is an independent function to let extensions plug in and react to
3117 3113 the imminent revert.
3118 3114
3119 3115 Make sure you have the working directory locked when calling this function.
3120 3116 """
3121 3117 parent, p2 = parents
3122 3118 node = ctx.node()
3123 3119 def checkout(f):
3124 3120 fc = ctx[f]
3125 3121 repo.wwrite(f, fc.data(), fc.flags())
3126 3122
3127 3123 audit_path = pathutil.pathauditor(repo.root)
3128 3124 for f in actions['forget'][0]:
3129 3125 repo.dirstate.drop(f)
3130 3126 for f in actions['remove'][0]:
3131 3127 audit_path(f)
3132 3128 try:
3133 3129 util.unlinkpath(repo.wjoin(f))
3134 3130 except OSError:
3135 3131 pass
3136 3132 repo.dirstate.remove(f)
3137 3133 for f in actions['drop'][0]:
3138 3134 audit_path(f)
3139 3135 repo.dirstate.remove(f)
3140 3136
3141 3137 normal = None
3142 3138 if node == parent:
3143 3139 # We're reverting to our parent. If possible, we'd like status
3144 3140 # to report the file as clean. We have to use normallookup for
3145 3141 # merges to avoid losing information about merged/dirty files.
3146 3142 if p2 != nullid:
3147 3143 normal = repo.dirstate.normallookup
3148 3144 else:
3149 3145 normal = repo.dirstate.normal
3150 3146
3151 3147 newlyaddedandmodifiedfiles = set()
3152 3148 if interactive:
3153 3149 # Prompt the user for changes to revert
3154 3150 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3155 3151 m = scmutil.match(ctx, torevert, {})
3156 3152 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3157 3153 diffopts.nodates = True
3158 3154 diffopts.git = True
3159 3155 reversehunks = repo.ui.configbool('experimental',
3160 3156 'revertalternateinteractivemode',
3161 3157 True)
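# In the (default) alternate mode the diff runs from the target revision to
# the working directory, so the user reviews their own changes and the hunks
# they select are reversed before being applied; in the legacy mode the diff
# already points toward the target and is applied as-is.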
3162 3158 if reversehunks:
3163 3159 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3164 3160 else:
3165 3161 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3166 3162 originalchunks = patch.parsepatch(diff)
3167 3163
3168 3164 try:
3169 3165
3170 3166 chunks = recordfilter(repo.ui, originalchunks)
3171 3167 if reversehunks:
3172 3168 chunks = patch.reversehunks(chunks)
3173 3169
3174 3170 except patch.PatchError as err:
3175 3171 raise util.Abort(_('error parsing patch: %s') % err)
3176 3172
3177 3173 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3178 3174 # Apply changes
3179 3175 fp = cStringIO.StringIO()
3180 3176 for c in chunks:
3181 3177 c.write(fp)
3182 3178 dopatch = fp.tell()
3183 3179 fp.seek(0)
3184 3180 if dopatch:
3185 3181 try:
3186 3182 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3187 3183 except patch.PatchError as err:
3188 3184 raise util.Abort(str(err))
3189 3185 del fp
3190 3186 else:
3191 3187 for f in actions['revert'][0]:
3192 3188 checkout(f)
3193 3189 if normal:
3194 3190 normal(f)
3195 3191
3196 3192 for f in actions['add'][0]:
3197 3193 # Don't check out modified files; they are already created by the diff
3198 3194 if f not in newlyaddedandmodifiedfiles:
3199 3195 checkout(f)
3200 3196 repo.dirstate.add(f)
3201 3197
3202 3198 normal = repo.dirstate.normallookup
3203 3199 if node == parent and p2 == nullid:
3204 3200 normal = repo.dirstate.normal
3205 3201 for f in actions['undelete'][0]:
3206 3202 checkout(f)
3207 3203 normal(f)
3208 3204
3209 3205 copied = copies.pathcopies(repo[parent], ctx)
3210 3206
3211 3207 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3212 3208 if f in copied:
3213 3209 repo.dirstate.copy(copied[f], f)
3214 3210
3215 3211 def command(table):
3216 3212 """Returns a function object to be used as a decorator for making commands.
3217 3213
3218 3214 This function receives a command table as its argument. The table should
3219 3215 be a dict.
3220 3216
3221 3217 The returned function can be used as a decorator for adding commands
3222 3218 to that command table. This function accepts multiple arguments to define
3223 3219 a command.
3224 3220
3225 3221 The first argument is the command name.
3226 3222
3227 3223 The options argument is an iterable of tuples defining command arguments.
3228 3224 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3229 3225
3230 3226 The synopsis argument defines a short, one-line summary of how to use the
3231 3227 command. This shows up in the help output.
3232 3228
3233 3229 The norepo argument defines whether the command does not require a
3234 3230 local repository. Most commands operate against a repository, thus the
3235 3231 default is False.
3236 3232
3237 3233 The optionalrepo argument defines whether the command optionally requires
3238 3234 a local repository.
3239 3235
3240 3236 The inferrepo argument defines whether to try to find a repository from the
3241 3237 command line arguments. If True, arguments will be examined for potential
3242 3238 repository locations. See ``findrepo()``. If a repository is found, it
3243 3239 will be used.
3244 3240 """
3245 3241 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3246 3242 inferrepo=False):
3247 3243 def decorator(func):
3248 3244 if synopsis:
3249 3245 table[name] = func, list(options), synopsis
3250 3246 else:
3251 3247 table[name] = func, list(options)
3252 3248
3253 3249 if norepo:
3254 3250 # Avoid import cycle.
3255 3251 import commands
3256 3252 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3257 3253
3258 3254 if optionalrepo:
3259 3255 import commands
3260 3256 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3261 3257
3262 3258 if inferrepo:
3263 3259 import commands
3264 3260 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3265 3261
3266 3262 return func
3267 3263 return decorator
3268 3264
3269 3265 return cmd
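# Example of how an extension typically uses the returned decorator (an
# illustrative sketch; the 'hello' command and its option are hypothetical):
#
#   cmdtable = {}
#   command = cmdutil.command(cmdtable)
#
#   @command('hello', [('g', 'greeting', 'Hello', _('greeting to use'))],
#            _('hg hello [-g TEXT] NAME'))
#   def hello(ui, repo, name, **opts):
#       ui.write("%s, %s!\n" % (opts['greeting'], name))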
3270 3266
3271 3267 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3272 3268 # commands.outgoing. "missing" is "missing" of the result of
3273 3269 # "findcommonoutgoing()"
3274 3270 outgoinghooks = util.hooks()
3275 3271
3276 3272 # a list of (ui, repo) functions called by commands.summary
3277 3273 summaryhooks = util.hooks()
3278 3274
3279 3275 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3280 3276 #
3281 3277 # functions should return tuple of booleans below, if 'changes' is None:
3282 3278 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3283 3279 #
3284 3280 # otherwise, 'changes' is a tuple of tuples below:
3285 3281 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3286 3282 # - (desturl, destbranch, destpeer, outgoing)
3287 3283 summaryremotehooks = util.hooks()
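# Extensions hook in by registering a callback, e.g. (illustrative sketch;
# 'myext' and the callback body are hypothetical):
#
#   def summaryhook(ui, repo):
#       ui.status('myext: everything looks fine\n')
#   cmdutil.summaryhooks.add('myext', summaryhook)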
3288 3284
3289 3285 # A list of state files kept by multistep operations like graft.
3290 3286 # Since graft cannot be aborted, it is considered 'clearable' by update.
3291 3287 # note: bisect is intentionally excluded
3292 3288 # (state file, clearable, allowcommit, error, hint)
3293 3289 unfinishedstates = [
3294 3290 ('graftstate', True, False, _('graft in progress'),
3295 3291 _("use 'hg graft --continue' or 'hg update' to abort")),
3296 3292 ('updatestate', True, False, _('last update was interrupted'),
3297 3293 _("use 'hg update' to get a consistent checkout"))
3298 3294 ]
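# An extension that adds its own multistep operation typically appends an
# entry in its uisetup, roughly (illustrative sketch modeled on rebase):
#
#   cmdutil.unfinishedstates.append(
#       ('rebasestate', False, False, _('rebase in progress'),
#        _("use 'hg rebase --continue' or 'hg rebase --abort'")))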
3299 3295
3300 3296 def checkunfinished(repo, commit=False):
3301 3297 '''Look for an unfinished multistep operation, like graft, and abort
3302 3298 if found. It's probably good to check this right before
3303 3299 bailifchanged().
3304 3300 '''
3305 3301 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3306 3302 if commit and allowcommit:
3307 3303 continue
3308 3304 if repo.vfs.exists(f):
3309 3305 raise util.Abort(msg, hint=hint)
3310 3306
3311 3307 def clearunfinished(repo):
3312 3308 '''Check for unfinished operations (as above), and clear the ones
3313 3309 that are clearable.
3314 3310 '''
3315 3311 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3316 3312 if not clearable and repo.vfs.exists(f):
3317 3313 raise util.Abort(msg, hint=hint)
3318 3314 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3319 3315 if clearable and repo.vfs.exists(f):
3320 3316 util.unlink(repo.join(f))
3321 3317
3322 3318 class dirstateguard(object):
3323 3319 '''Restore dirstate at unexpected failure.
3324 3320
3325 3321 At construction, this class does the following:
3326 3322
3327 3323 - write the current ``repo.dirstate`` out, and
3328 3324 - save ``.hg/dirstate`` into a backup file
3329 3325
3330 3326 This restores ``.hg/dirstate`` from the backup file, if ``release()``
3331 3327 is invoked before ``close()``.
3332 3328
3333 3329 If ``close()`` is invoked first, this just removes the backup file.
3334 3330 '''
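# Typical usage (illustrative sketch; the surrounding command body is
# assumed, not taken from this module):
#
#   dsguard = dirstateguard(repo, 'mycommand')
#   try:
#       ... operations that modify repo.dirstate ...
#       dsguard.close()    # success: discard the backup
#   finally:
#       dsguard.release()  # failure before close(): restore the backup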
3335 3331
3336 3332 def __init__(self, repo, name):
3337 3333 repo.dirstate.write()
3338 3334 self._repo = repo
3339 3335 self._filename = 'dirstate.backup.%s.%d' % (name, id(self))
3340 3336 repo.vfs.write(self._filename, repo.vfs.tryread('dirstate'))
3341 3337 self._active = True
3342 3338 self._closed = False
3343 3339
3344 3340 def __del__(self):
3345 3341 if self._active: # still active
3346 3342 # this may occur, even if this class is used correctly:
3347 3343 # for example, releasing other resources like the transaction
3348 3344 # may raise an exception before ``dirstateguard.release`` in
3349 3345 # ``release(tr, ....)``.
3350 3346 self._abort()
3351 3347
3352 3348 def close(self):
3353 3349 if not self._active: # already inactivated
3354 3350 msg = (_("can't close already inactivated backup: %s")
3355 3351 % self._filename)
3356 3352 raise util.Abort(msg)
3357 3353
3358 3354 self._repo.vfs.unlink(self._filename)
3359 3355 self._active = False
3360 3356 self._closed = True
3361 3357
3362 3358 def _abort(self):
3363 3359 # this "invalidate()" prevents "wlock.release()" from writing
3364 3360 # dirstate changes out after restoring the original status
3365 3361 self._repo.dirstate.invalidate()
3366 3362
3367 3363 self._repo.vfs.rename(self._filename, 'dirstate')
3368 3364 self._active = False
3369 3365
3370 3366 def release(self):
3371 3367 if not self._closed:
3372 3368 if not self._active: # already inactivated
3373 3369 msg = (_("can't release already inactivated backup: %s")
3374 3370 % self._filename)
3375 3371 raise util.Abort(msg)
3376 3372 self._abort()
3377 3373
3378 3374 _bundlecompspecs = {'none': None,
3379 3375 'bzip2': 'BZ',
3380 3376 'gzip': 'GZ',
3381 3377 }
3382 3378
3383 3379 _bundleversionspecs = {'v1': '01',
3384 3380 'v2': '02',
3385 3381 'bundle2': '02', #legacy
3386 3382 }
3387 3383
3388 3384 def parsebundletype(repo, spec):
3389 3385 """return the internal bundle type to use from user input
3390 3386
3391 3387 This parses the user-specified bundle type as accepted by:
3392 3388
3393 3389 'hg bundle --type TYPE'.
3394 3390
3395 3391 It accepts formats of the form [compression][-version]|[version]
3396 3392
3397 3393 Consensus about extensions of the format for various bundle2 features
3398 3394 is to prefix any feature with "+", e.g. "+treemanifest" or "gzip+phases"
3399 3395 """
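# A few example mappings (derived from the tables above; the return value is
# (version, compression)):
#   'bzip2-v2' -> ('02', 'BZ')
#   'v1'       -> ('01', 'BZ')
#   'gzip'     -> ('01', 'GZ')   # or ('02', 'GZ') on generaldelta repos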
3400 3396 comp, version = None, None
3401 3397
3402 3398 if '-' in spec:
3403 3399 comp, version = spec.split('-', 1)
3404 3400 elif spec in _bundlecompspecs:
3405 3401 comp = spec
3406 3402 elif spec in _bundleversionspecs:
3407 3403 version = spec
3408 3404 else:
3409 3405 raise util.Abort(_('unknown bundle type specified with --type'))
3410 3406
3411 3407 if comp is None:
3412 3408 comp = 'BZ'
3413 3409 else:
3414 3410 try:
3415 3411 comp = _bundlecompspecs[comp]
3416 3412 except KeyError:
3417 3413 raise util.Abort(_('unknown bundle type specified with --type'))
3418 3414
3419 3415 if version is None:
3420 3416 version = '01'
3421 3417 if 'generaldelta' in repo.requirements:
3422 3418 version = '02'
3423 3419 else:
3424 3420 try:
3425 3421 version = _bundleversionspecs[version]
3426 3422 except KeyError:
3427 3423 raise util.Abort(_('unknown bundle type specified with --type'))
3428 3424
3429 3425 return version, comp