export: remove useless comparison between file object and string...
Yuya Nishihara
r27417:9073a1e4 default
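The change is a one-line simplification in the single() helper inside export() (old lines 1057-1058 vs. new line 1057 below): at that point fp has just been assigned the file object returned by makefileobj(), so comparing it against the template string is always unequal, the guard is always taken, and it only obscures the intent. The patch drops the comparison and sets shouldclose unconditionally. A minimal sketch of the before/after, not a verbatim excerpt:

    # before
    fp = makefileobj(repo, template, node, ...)   # returns a file(-like) object
    if fp != template:                            # file object vs. string: always unequal
        shouldclose = True

    # after
    fp = makefileobj(repo, template, node, ...)
    shouldclose = True                            # we opened fp here, so close it when done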
@@ -1,3419 +1,3418 @@
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import formatter
18 18 import crecord as crecordmod
19 19 import lock as lockmod
20 20
21 21 def ishunk(x):
22 22 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
23 23 return isinstance(x, hunkclasses)
24 24
25 25 def newandmodified(chunks, originalchunks):
26 26 newlyaddedandmodifiedfiles = set()
27 27 for chunk in chunks:
28 28 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
29 29 originalchunks:
30 30 newlyaddedandmodifiedfiles.add(chunk.header.filename())
31 31 return newlyaddedandmodifiedfiles
32 32
33 33 def parsealiases(cmd):
34 34 return cmd.lstrip("^").split("|")
35 35
36 36 def setupwrapcolorwrite(ui):
37 37 # wrap ui.write so diff output can be labeled/colorized
38 38 def wrapwrite(orig, *args, **kw):
39 39 label = kw.pop('label', '')
40 40 for chunk, l in patch.difflabel(lambda: args):
41 41 orig(chunk, label=label + l)
42 42
43 43 oldwrite = ui.write
44 44 def wrap(*args, **kwargs):
45 45 return wrapwrite(oldwrite, *args, **kwargs)
46 46 setattr(ui, 'write', wrap)
47 47 return oldwrite
48 48
49 49 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
50 50 if usecurses:
51 51 if testfile:
52 52 recordfn = crecordmod.testdecorator(testfile,
53 53 crecordmod.testchunkselector)
54 54 else:
55 55 recordfn = crecordmod.chunkselector
56 56
57 57 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
58 58
59 59 else:
60 60 return patch.filterpatch(ui, originalhunks, operation)
61 61
62 62 def recordfilter(ui, originalhunks, operation=None):
63 63 """ Prompts the user to filter the originalhunks and return a list of
64 64 selected hunks.
65 65 *operation* is used for ui purposes to indicate to the user
66 66 what kind of filtering they are doing: reverting, committing, shelving, etc.
67 67 *operation* has to be a translated string.
68 68 """
69 69 usecurses = ui.configbool('experimental', 'crecord', False)
70 70 testfile = ui.config('experimental', 'crecordtest', None)
71 71 oldwrite = setupwrapcolorwrite(ui)
72 72 try:
73 73 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
74 74 testfile, operation)
75 75 finally:
76 76 ui.write = oldwrite
77 77 return newchunks, newopts
78 78
79 79 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
80 80 filterfn, *pats, **opts):
81 81 import merge as mergemod
82 82
83 83 if not ui.interactive():
84 84 if cmdsuggest:
85 85 msg = _('running non-interactively, use %s instead') % cmdsuggest
86 86 else:
87 87 msg = _('running non-interactively')
88 88 raise error.Abort(msg)
89 89
90 90 # make sure username is set before going interactive
91 91 if not opts.get('user'):
92 92 ui.username() # raise exception, username not provided
93 93
94 94 def recordfunc(ui, repo, message, match, opts):
95 95 """This is the generic record driver.
96 96
97 97 Its job is to interactively filter local changes, and
98 98 accordingly prepare working directory into a state in which the
99 99 job can be delegated to a non-interactive commit command such as
100 100 'commit' or 'qrefresh'.
101 101
102 102 After the actual job is done by the non-interactive command, the
103 103 working directory is restored to its original state.
104 104
105 105 In the end we'll record interesting changes, and everything else
106 106 will be left in place, so the user can continue working.
107 107 """
108 108
109 109 checkunfinished(repo, commit=True)
110 110 merge = len(repo[None].parents()) > 1
111 111 if merge:
112 112 raise error.Abort(_('cannot partially commit a merge '
113 113 '(use "hg commit" instead)'))
114 114
115 115 status = repo.status(match=match)
116 116 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
117 117 diffopts.nodates = True
118 118 diffopts.git = True
119 119 diffopts.showfunc = True
120 120 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
121 121 originalchunks = patch.parsepatch(originaldiff)
122 122
123 123 # 1. filter patch, so we have the intending-to-apply subset of it
124 124 try:
125 125 chunks, newopts = filterfn(ui, originalchunks)
126 126 except patch.PatchError as err:
127 127 raise error.Abort(_('error parsing patch: %s') % err)
128 128 opts.update(newopts)
129 129
130 130 # We need to keep a backup of files that have been newly added and
131 131 # modified during the recording process because there is a previous
132 132 # version without the edit in the workdir
133 133 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
134 134 contenders = set()
135 135 for h in chunks:
136 136 try:
137 137 contenders.update(set(h.files()))
138 138 except AttributeError:
139 139 pass
140 140
141 141 changed = status.modified + status.added + status.removed
142 142 newfiles = [f for f in changed if f in contenders]
143 143 if not newfiles:
144 144 ui.status(_('no changes to record\n'))
145 145 return 0
146 146
147 147 modified = set(status.modified)
148 148
149 149 # 2. backup changed files, so we can restore them in the end
150 150
151 151 if backupall:
152 152 tobackup = changed
153 153 else:
154 154 tobackup = [f for f in newfiles if f in modified or f in \
155 155 newlyaddedandmodifiedfiles]
156 156 backups = {}
157 157 if tobackup:
158 158 backupdir = repo.join('record-backups')
159 159 try:
160 160 os.mkdir(backupdir)
161 161 except OSError as err:
162 162 if err.errno != errno.EEXIST:
163 163 raise
164 164 try:
165 165 # backup continues
166 166 for f in tobackup:
167 167 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
168 168 dir=backupdir)
169 169 os.close(fd)
170 170 ui.debug('backup %r as %r\n' % (f, tmpname))
171 171 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
172 172 backups[f] = tmpname
173 173
174 174 fp = cStringIO.StringIO()
175 175 for c in chunks:
176 176 fname = c.filename()
177 177 if fname in backups:
178 178 c.write(fp)
179 179 dopatch = fp.tell()
180 180 fp.seek(0)
181 181
182 182 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
183 183 # 3a. apply filtered patch to clean repo (clean)
184 184 if backups:
185 185 # Equivalent to hg.revert
186 186 m = scmutil.matchfiles(repo, backups.keys())
187 187 mergemod.update(repo, repo.dirstate.p1(),
188 188 False, True, matcher=m)
189 189
190 190 # 3b. (apply)
191 191 if dopatch:
192 192 try:
193 193 ui.debug('applying patch\n')
194 194 ui.debug(fp.getvalue())
195 195 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
196 196 except patch.PatchError as err:
197 197 raise error.Abort(str(err))
198 198 del fp
199 199
200 200 # 4. We prepared working directory according to filtered
201 201 # patch. Now is the time to delegate the job to
202 202 # commit/qrefresh or the like!
203 203
204 204 # Make all of the pathnames absolute.
205 205 newfiles = [repo.wjoin(nf) for nf in newfiles]
206 206 return commitfunc(ui, repo, *newfiles, **opts)
207 207 finally:
208 208 # 5. finally restore backed-up files
209 209 try:
210 210 dirstate = repo.dirstate
211 211 for realname, tmpname in backups.iteritems():
212 212 ui.debug('restoring %r to %r\n' % (tmpname, realname))
213 213
214 214 if dirstate[realname] == 'n':
215 215 # without normallookup, restoring timestamp
216 216 # may cause partially committed files
217 217 # to be treated as unmodified
218 218 dirstate.normallookup(realname)
219 219
220 220 # copystat=True here and above are a hack to trick any
221 221 # editors that have f open into thinking that we haven't modified them.
222 222 #
223 223 # Also note that this is racy, as an editor could notice the
224 224 # file's mtime before we've finished writing it.
225 225 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
226 226 os.unlink(tmpname)
227 227 if tobackup:
228 228 os.rmdir(backupdir)
229 229 except OSError:
230 230 pass
231 231
232 232 def recordinwlock(ui, repo, message, match, opts):
233 233 wlock = repo.wlock()
234 234 try:
235 235 return recordfunc(ui, repo, message, match, opts)
236 236 finally:
237 237 wlock.release()
238 238
239 239 return commit(ui, repo, recordinwlock, pats, opts)
240 240
241 241 def findpossible(cmd, table, strict=False):
242 242 """
243 243 Return cmd -> (aliases, command table entry)
244 244 for each matching command.
245 245 Return debug commands (or their aliases) only if no normal command matches.
246 246 """
247 247 choice = {}
248 248 debugchoice = {}
249 249
250 250 if cmd in table:
251 251 # short-circuit exact matches, "log" alias beats "^log|history"
252 252 keys = [cmd]
253 253 else:
254 254 keys = table.keys()
255 255
256 256 allcmds = []
257 257 for e in keys:
258 258 aliases = parsealiases(e)
259 259 allcmds.extend(aliases)
260 260 found = None
261 261 if cmd in aliases:
262 262 found = cmd
263 263 elif not strict:
264 264 for a in aliases:
265 265 if a.startswith(cmd):
266 266 found = a
267 267 break
268 268 if found is not None:
269 269 if aliases[0].startswith("debug") or found.startswith("debug"):
270 270 debugchoice[found] = (aliases, table[e])
271 271 else:
272 272 choice[found] = (aliases, table[e])
273 273
274 274 if not choice and debugchoice:
275 275 choice = debugchoice
276 276
277 277 return choice, allcmds
278 278
279 279 def findcmd(cmd, table, strict=True):
280 280 """Return (aliases, command table entry) for command string."""
281 281 choice, allcmds = findpossible(cmd, table, strict)
282 282
283 283 if cmd in choice:
284 284 return choice[cmd]
285 285
286 286 if len(choice) > 1:
287 287 clist = choice.keys()
288 288 clist.sort()
289 289 raise error.AmbiguousCommand(cmd, clist)
290 290
291 291 if choice:
292 292 return choice.values()[0]
293 293
294 294 raise error.UnknownCommand(cmd, allcmds)
295 295
296 296 def findrepo(p):
297 297 while not os.path.isdir(os.path.join(p, ".hg")):
298 298 oldp, p = p, os.path.dirname(p)
299 299 if p == oldp:
300 300 return None
301 301
302 302 return p
303 303
304 304 def bailifchanged(repo, merge=True):
305 305 if merge and repo.dirstate.p2() != nullid:
306 306 raise error.Abort(_('outstanding uncommitted merge'))
307 307 modified, added, removed, deleted = repo.status()[:4]
308 308 if modified or added or removed or deleted:
309 309 raise error.Abort(_('uncommitted changes'))
310 310 ctx = repo[None]
311 311 for s in sorted(ctx.substate):
312 312 ctx.sub(s).bailifchanged()
313 313
314 314 def logmessage(ui, opts):
315 315 """ get the log message according to -m and -l option """
316 316 message = opts.get('message')
317 317 logfile = opts.get('logfile')
318 318
319 319 if message and logfile:
320 320 raise error.Abort(_('options --message and --logfile are mutually '
321 321 'exclusive'))
322 322 if not message and logfile:
323 323 try:
324 324 if logfile == '-':
325 325 message = ui.fin.read()
326 326 else:
327 327 message = '\n'.join(util.readfile(logfile).splitlines())
328 328 except IOError as inst:
329 329 raise error.Abort(_("can't read commit message '%s': %s") %
330 330 (logfile, inst.strerror))
331 331 return message
332 332
333 333 def mergeeditform(ctxorbool, baseformname):
334 334 """return appropriate editform name (referencing a committemplate)
335 335
336 336 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
337 337 a merge is being committed.
338 338
339 339 This returns baseformname with '.merge' appended if it is a merge,
340 340 otherwise '.normal' is appended.
341 341 """
342 342 if isinstance(ctxorbool, bool):
343 343 if ctxorbool:
344 344 return baseformname + ".merge"
345 345 elif 1 < len(ctxorbool.parents()):
346 346 return baseformname + ".merge"
347 347
348 348 return baseformname + ".normal"
349 349
350 350 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
351 351 editform='', **opts):
352 352 """get appropriate commit message editor according to '--edit' option
353 353
354 354 'finishdesc' is a function to be called with the edited commit message
355 355 (= 'description' of the new changeset) just after editing, but
356 356 before checking emptiness. It should return the actual text to be
357 357 stored into history. This allows changing the description before
358 358 storing.
359 359
360 360 'extramsg' is an extra message to be shown in the editor instead of
361 361 the 'Leave message empty to abort commit' line. An 'HG: ' prefix and EOL
362 362 are automatically added.
363 363
364 364 'editform' is a dot-separated list of names, to distinguish
365 365 the purpose of commit text editing.
366 366
367 367 'getcommiteditor' returns 'commitforceeditor' regardless of
368 368 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
369 369 they are specific for usage in MQ.
370 370 """
371 371 if edit or finishdesc or extramsg:
372 372 return lambda r, c, s: commitforceeditor(r, c, s,
373 373 finishdesc=finishdesc,
374 374 extramsg=extramsg,
375 375 editform=editform)
376 376 elif editform:
377 377 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
378 378 else:
379 379 return commiteditor
380 380
381 381 def loglimit(opts):
382 382 """get the log limit according to option -l/--limit"""
383 383 limit = opts.get('limit')
384 384 if limit:
385 385 try:
386 386 limit = int(limit)
387 387 except ValueError:
388 388 raise error.Abort(_('limit must be a positive integer'))
389 389 if limit <= 0:
390 390 raise error.Abort(_('limit must be positive'))
391 391 else:
392 392 limit = None
393 393 return limit
394 394
395 395 def makefilename(repo, pat, node, desc=None,
396 396 total=None, seqno=None, revwidth=None, pathname=None):
397 397 node_expander = {
398 398 'H': lambda: hex(node),
399 399 'R': lambda: str(repo.changelog.rev(node)),
400 400 'h': lambda: short(node),
401 401 'm': lambda: re.sub('[^\w]', '_', str(desc))
402 402 }
403 403 expander = {
404 404 '%': lambda: '%',
405 405 'b': lambda: os.path.basename(repo.root),
406 406 }
407 407
408 408 try:
409 409 if node:
410 410 expander.update(node_expander)
411 411 if node:
412 412 expander['r'] = (lambda:
413 413 str(repo.changelog.rev(node)).zfill(revwidth or 0))
414 414 if total is not None:
415 415 expander['N'] = lambda: str(total)
416 416 if seqno is not None:
417 417 expander['n'] = lambda: str(seqno)
418 418 if total is not None and seqno is not None:
419 419 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
420 420 if pathname is not None:
421 421 expander['s'] = lambda: os.path.basename(pathname)
422 422 expander['d'] = lambda: os.path.dirname(pathname) or '.'
423 423 expander['p'] = lambda: pathname
424 424
425 425 newname = []
426 426 patlen = len(pat)
427 427 i = 0
428 428 while i < patlen:
429 429 c = pat[i]
430 430 if c == '%':
431 431 i += 1
432 432 c = pat[i]
433 433 c = expander[c]()
434 434 newname.append(c)
435 435 i += 1
436 436 return ''.join(newname)
437 437 except KeyError as inst:
438 438 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
439 439 inst.args[0])
440 440
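As a usage illustration of the expander above (a sketch: the pattern and the ctx variable are assumptions, but the %-specs are the ones defined in makefilename, and the default export template 'hg-%h.patch' used later in this file goes through the same code):

    # hypothetical call, roughly what an 'hg export -o PATTERN' invocation boils down to
    makefilename(repo, '%R-%h.patch', ctx.node(), desc=ctx.description(),
                 total=1, seqno=1, revwidth=1)
    # -> '<revision number>-<short hex node>.patch'
    # other specs: %H full hex node, %r zero-padded revision, %m description with
    # non-word characters replaced by '_', %b basename of the repository root,
    # %n/%N sequence number / total count, %s/%d/%p pieces of the pathname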
441 441 def makefileobj(repo, pat, node=None, desc=None, total=None,
442 442 seqno=None, revwidth=None, mode='wb', modemap=None,
443 443 pathname=None):
444 444
445 445 writable = mode not in ('r', 'rb')
446 446
447 447 if not pat or pat == '-':
448 448 if writable:
449 449 fp = repo.ui.fout
450 450 else:
451 451 fp = repo.ui.fin
452 452 if util.safehasattr(fp, 'fileno'):
453 453 return os.fdopen(os.dup(fp.fileno()), mode)
454 454 else:
455 455 # if this fp can't be duped properly, return
456 456 # a dummy object that can be closed
457 457 class wrappedfileobj(object):
458 458 noop = lambda x: None
459 459 def __init__(self, f):
460 460 self.f = f
461 461 def __getattr__(self, attr):
462 462 if attr == 'close':
463 463 return self.noop
464 464 else:
465 465 return getattr(self.f, attr)
466 466
467 467 return wrappedfileobj(fp)
468 468 if util.safehasattr(pat, 'write') and writable:
469 469 return pat
470 470 if util.safehasattr(pat, 'read') and 'r' in mode:
471 471 return pat
472 472 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
473 473 if modemap is not None:
474 474 mode = modemap.get(fn, mode)
475 475 if mode == 'wb':
476 476 modemap[fn] = 'ab'
477 477 return open(fn, mode)
478 478
479 479 def openrevlog(repo, cmd, file_, opts):
480 480 """opens the changelog, manifest, a filelog or a given revlog"""
481 481 cl = opts['changelog']
482 482 mf = opts['manifest']
483 483 dir = opts['dir']
484 484 msg = None
485 485 if cl and mf:
486 486 msg = _('cannot specify --changelog and --manifest at the same time')
487 487 elif cl and dir:
488 488 msg = _('cannot specify --changelog and --dir at the same time')
489 489 elif cl or mf:
490 490 if file_:
491 491 msg = _('cannot specify filename with --changelog or --manifest')
492 492 elif not repo:
493 493 msg = _('cannot specify --changelog or --manifest or --dir '
494 494 'without a repository')
495 495 if msg:
496 496 raise error.Abort(msg)
497 497
498 498 r = None
499 499 if repo:
500 500 if cl:
501 501 r = repo.unfiltered().changelog
502 502 elif dir:
503 503 if 'treemanifest' not in repo.requirements:
504 504 raise error.Abort(_("--dir can only be used on repos with "
505 505 "treemanifest enabled"))
506 506 dirlog = repo.dirlog(file_)
507 507 if len(dirlog):
508 508 r = dirlog
509 509 elif mf:
510 510 r = repo.manifest
511 511 elif file_:
512 512 filelog = repo.file(file_)
513 513 if len(filelog):
514 514 r = filelog
515 515 if not r:
516 516 if not file_:
517 517 raise error.CommandError(cmd, _('invalid arguments'))
518 518 if not os.path.isfile(file_):
519 519 raise error.Abort(_("revlog '%s' not found") % file_)
520 520 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
521 521 file_[:-2] + ".i")
522 522 return r
523 523
524 524 def copy(ui, repo, pats, opts, rename=False):
525 525 # called with the repo lock held
526 526 #
527 527 # hgsep => pathname that uses "/" to separate directories
528 528 # ossep => pathname that uses os.sep to separate directories
529 529 cwd = repo.getcwd()
530 530 targets = {}
531 531 after = opts.get("after")
532 532 dryrun = opts.get("dry_run")
533 533 wctx = repo[None]
534 534
535 535 def walkpat(pat):
536 536 srcs = []
537 537 if after:
538 538 badstates = '?'
539 539 else:
540 540 badstates = '?r'
541 541 m = scmutil.match(repo[None], [pat], opts, globbed=True)
542 542 for abs in repo.walk(m):
543 543 state = repo.dirstate[abs]
544 544 rel = m.rel(abs)
545 545 exact = m.exact(abs)
546 546 if state in badstates:
547 547 if exact and state == '?':
548 548 ui.warn(_('%s: not copying - file is not managed\n') % rel)
549 549 if exact and state == 'r':
550 550 ui.warn(_('%s: not copying - file has been marked for'
551 551 ' remove\n') % rel)
552 552 continue
553 553 # abs: hgsep
554 554 # rel: ossep
555 555 srcs.append((abs, rel, exact))
556 556 return srcs
557 557
558 558 # abssrc: hgsep
559 559 # relsrc: ossep
560 560 # otarget: ossep
561 561 def copyfile(abssrc, relsrc, otarget, exact):
562 562 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
563 563 if '/' in abstarget:
564 564 # We cannot normalize abstarget itself, this would prevent
565 565 # case only renames, like a => A.
566 566 abspath, absname = abstarget.rsplit('/', 1)
567 567 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
568 568 reltarget = repo.pathto(abstarget, cwd)
569 569 target = repo.wjoin(abstarget)
570 570 src = repo.wjoin(abssrc)
571 571 state = repo.dirstate[abstarget]
572 572
573 573 scmutil.checkportable(ui, abstarget)
574 574
575 575 # check for collisions
576 576 prevsrc = targets.get(abstarget)
577 577 if prevsrc is not None:
578 578 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
579 579 (reltarget, repo.pathto(abssrc, cwd),
580 580 repo.pathto(prevsrc, cwd)))
581 581 return
582 582
583 583 # check for overwrites
584 584 exists = os.path.lexists(target)
585 585 samefile = False
586 586 if exists and abssrc != abstarget:
587 587 if (repo.dirstate.normalize(abssrc) ==
588 588 repo.dirstate.normalize(abstarget)):
589 589 if not rename:
590 590 ui.warn(_("%s: can't copy - same file\n") % reltarget)
591 591 return
592 592 exists = False
593 593 samefile = True
594 594
595 595 if not after and exists or after and state in 'mn':
596 596 if not opts['force']:
597 597 ui.warn(_('%s: not overwriting - file exists\n') %
598 598 reltarget)
599 599 return
600 600
601 601 if after:
602 602 if not exists:
603 603 if rename:
604 604 ui.warn(_('%s: not recording move - %s does not exist\n') %
605 605 (relsrc, reltarget))
606 606 else:
607 607 ui.warn(_('%s: not recording copy - %s does not exist\n') %
608 608 (relsrc, reltarget))
609 609 return
610 610 elif not dryrun:
611 611 try:
612 612 if exists:
613 613 os.unlink(target)
614 614 targetdir = os.path.dirname(target) or '.'
615 615 if not os.path.isdir(targetdir):
616 616 os.makedirs(targetdir)
617 617 if samefile:
618 618 tmp = target + "~hgrename"
619 619 os.rename(src, tmp)
620 620 os.rename(tmp, target)
621 621 else:
622 622 util.copyfile(src, target)
623 623 srcexists = True
624 624 except IOError as inst:
625 625 if inst.errno == errno.ENOENT:
626 626 ui.warn(_('%s: deleted in working directory\n') % relsrc)
627 627 srcexists = False
628 628 else:
629 629 ui.warn(_('%s: cannot copy - %s\n') %
630 630 (relsrc, inst.strerror))
631 631 return True # report a failure
632 632
633 633 if ui.verbose or not exact:
634 634 if rename:
635 635 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
636 636 else:
637 637 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
638 638
639 639 targets[abstarget] = abssrc
640 640
641 641 # fix up dirstate
642 642 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
643 643 dryrun=dryrun, cwd=cwd)
644 644 if rename and not dryrun:
645 645 if not after and srcexists and not samefile:
646 646 util.unlinkpath(repo.wjoin(abssrc))
647 647 wctx.forget([abssrc])
648 648
649 649 # pat: ossep
650 650 # dest ossep
651 651 # srcs: list of (hgsep, hgsep, ossep, bool)
652 652 # return: function that takes hgsep and returns ossep
653 653 def targetpathfn(pat, dest, srcs):
654 654 if os.path.isdir(pat):
655 655 abspfx = pathutil.canonpath(repo.root, cwd, pat)
656 656 abspfx = util.localpath(abspfx)
657 657 if destdirexists:
658 658 striplen = len(os.path.split(abspfx)[0])
659 659 else:
660 660 striplen = len(abspfx)
661 661 if striplen:
662 662 striplen += len(os.sep)
663 663 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
664 664 elif destdirexists:
665 665 res = lambda p: os.path.join(dest,
666 666 os.path.basename(util.localpath(p)))
667 667 else:
668 668 res = lambda p: dest
669 669 return res
670 670
671 671 # pat: ossep
672 672 # dest ossep
673 673 # srcs: list of (hgsep, hgsep, ossep, bool)
674 674 # return: function that takes hgsep and returns ossep
675 675 def targetpathafterfn(pat, dest, srcs):
676 676 if matchmod.patkind(pat):
677 677 # a mercurial pattern
678 678 res = lambda p: os.path.join(dest,
679 679 os.path.basename(util.localpath(p)))
680 680 else:
681 681 abspfx = pathutil.canonpath(repo.root, cwd, pat)
682 682 if len(abspfx) < len(srcs[0][0]):
683 683 # A directory. Either the target path contains the last
684 684 # component of the source path or it does not.
685 685 def evalpath(striplen):
686 686 score = 0
687 687 for s in srcs:
688 688 t = os.path.join(dest, util.localpath(s[0])[striplen:])
689 689 if os.path.lexists(t):
690 690 score += 1
691 691 return score
692 692
693 693 abspfx = util.localpath(abspfx)
694 694 striplen = len(abspfx)
695 695 if striplen:
696 696 striplen += len(os.sep)
697 697 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
698 698 score = evalpath(striplen)
699 699 striplen1 = len(os.path.split(abspfx)[0])
700 700 if striplen1:
701 701 striplen1 += len(os.sep)
702 702 if evalpath(striplen1) > score:
703 703 striplen = striplen1
704 704 res = lambda p: os.path.join(dest,
705 705 util.localpath(p)[striplen:])
706 706 else:
707 707 # a file
708 708 if destdirexists:
709 709 res = lambda p: os.path.join(dest,
710 710 os.path.basename(util.localpath(p)))
711 711 else:
712 712 res = lambda p: dest
713 713 return res
714 714
715 715 pats = scmutil.expandpats(pats)
716 716 if not pats:
717 717 raise error.Abort(_('no source or destination specified'))
718 718 if len(pats) == 1:
719 719 raise error.Abort(_('no destination specified'))
720 720 dest = pats.pop()
721 721 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
722 722 if not destdirexists:
723 723 if len(pats) > 1 or matchmod.patkind(pats[0]):
724 724 raise error.Abort(_('with multiple sources, destination must be an '
725 725 'existing directory'))
726 726 if util.endswithsep(dest):
727 727 raise error.Abort(_('destination %s is not a directory') % dest)
728 728
729 729 tfn = targetpathfn
730 730 if after:
731 731 tfn = targetpathafterfn
732 732 copylist = []
733 733 for pat in pats:
734 734 srcs = walkpat(pat)
735 735 if not srcs:
736 736 continue
737 737 copylist.append((tfn(pat, dest, srcs), srcs))
738 738 if not copylist:
739 739 raise error.Abort(_('no files to copy'))
740 740
741 741 errors = 0
742 742 for targetpath, srcs in copylist:
743 743 for abssrc, relsrc, exact in srcs:
744 744 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
745 745 errors += 1
746 746
747 747 if errors:
748 748 ui.warn(_('(consider using --after)\n'))
749 749
750 750 return errors != 0
751 751
752 752 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
753 753 runargs=None, appendpid=False):
754 754 '''Run a command as a service.'''
755 755
756 756 def writepid(pid):
757 757 if opts['pid_file']:
758 758 if appendpid:
759 759 mode = 'a'
760 760 else:
761 761 mode = 'w'
762 762 fp = open(opts['pid_file'], mode)
763 763 fp.write(str(pid) + '\n')
764 764 fp.close()
765 765
766 766 if opts['daemon'] and not opts['daemon_pipefds']:
767 767 # Signal child process startup with file removal
768 768 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
769 769 os.close(lockfd)
770 770 try:
771 771 if not runargs:
772 772 runargs = util.hgcmd() + sys.argv[1:]
773 773 runargs.append('--daemon-pipefds=%s' % lockpath)
774 774 # Don't pass --cwd to the child process, because we've already
775 775 # changed directory.
776 776 for i in xrange(1, len(runargs)):
777 777 if runargs[i].startswith('--cwd='):
778 778 del runargs[i]
779 779 break
780 780 elif runargs[i].startswith('--cwd'):
781 781 del runargs[i:i + 2]
782 782 break
783 783 def condfn():
784 784 return not os.path.exists(lockpath)
785 785 pid = util.rundetached(runargs, condfn)
786 786 if pid < 0:
787 787 raise error.Abort(_('child process failed to start'))
788 788 writepid(pid)
789 789 finally:
790 790 try:
791 791 os.unlink(lockpath)
792 792 except OSError as e:
793 793 if e.errno != errno.ENOENT:
794 794 raise
795 795 if parentfn:
796 796 return parentfn(pid)
797 797 else:
798 798 return
799 799
800 800 if initfn:
801 801 initfn()
802 802
803 803 if not opts['daemon']:
804 804 writepid(os.getpid())
805 805
806 806 if opts['daemon_pipefds']:
807 807 lockpath = opts['daemon_pipefds']
808 808 try:
809 809 os.setsid()
810 810 except AttributeError:
811 811 pass
812 812 os.unlink(lockpath)
813 813 util.hidewindow()
814 814 sys.stdout.flush()
815 815 sys.stderr.flush()
816 816
817 817 nullfd = os.open(os.devnull, os.O_RDWR)
818 818 logfilefd = nullfd
819 819 if logfile:
820 820 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
821 821 os.dup2(nullfd, 0)
822 822 os.dup2(logfilefd, 1)
823 823 os.dup2(logfilefd, 2)
824 824 if nullfd not in (0, 1, 2):
825 825 os.close(nullfd)
826 826 if logfile and logfilefd not in (0, 1, 2):
827 827 os.close(logfilefd)
828 828
829 829 if runfn:
830 830 return runfn()
831 831
832 832 ## facility to let extensions process additional data into an import patch
833 833 # list of identifiers to be executed in order
834 834 extrapreimport = [] # run before commit
835 835 extrapostimport = [] # run after commit
836 836 # mapping from identifier to actual import function
837 837 #
838 838 # 'preimport' are run before the commit is made and are provided the following
839 839 # arguments:
840 840 # - repo: the localrepository instance,
841 841 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
842 842 # - extra: the future extra dictionary of the changeset, please mutate it,
843 843 # - opts: the import options.
844 844 # XXX ideally, we would just pass a ctx ready to be computed, that would allow
845 845 # mutation of in memory commit and more. Feel free to rework the code to get
846 846 # there.
847 847 extrapreimportmap = {}
848 848 # 'postimport' are run after the commit is made and are provided the following
849 849 # argument:
850 850 # - ctx: the changectx created by import.
851 851 extrapostimportmap = {}
852 852
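The comment block above defines the import-side hook points. A minimal sketch of how a third-party extension might register into them (the extension and identifier names here are hypothetical; the call signatures come from the comments and from tryimportone() below):

    # hypothetical extension code, not part of cmdutil.py
    from mercurial import cmdutil

    def _stampsource(repo, patchdata, extra, opts):
        # runs before the commit; mutate the future changeset's extra dict
        extra['imported-from'] = patchdata.get('nodeid') or 'unknown'

    def _reportimport(ctx):
        # runs after the commit; ctx is the changectx created by import
        ctx.repo().ui.note('imported %s\n' % ctx.hex()[:12])

    cmdutil.extrapreimport.append('stampsource')
    cmdutil.extrapreimportmap['stampsource'] = _stampsource
    cmdutil.extrapostimport.append('reportimport')
    cmdutil.extrapostimportmap['reportimport'] = _reportimport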
853 853 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
854 854 """Utility function used by commands.import to import a single patch
855 855
856 856 This function is explicitly defined here to help the evolve extension to
857 857 wrap this part of the import logic.
858 858
859 859 The API is currently a bit ugly because it is a simple code translation from
860 860 the import command. Feel free to make it better.
861 861
862 862 :hunk: a patch (as a binary string)
863 863 :parents: nodes that will be parent of the created commit
864 864 :opts: the full dict of options passed to the import command
865 865 :msgs: list to save the commit message to.
866 866 (used in case we need to save it when failing)
867 867 :updatefunc: a function that updates a repo to a given node
868 868 updatefunc(<repo>, <node>)
869 869 """
870 870 # avoid cycle context -> subrepo -> cmdutil
871 871 import context
872 872 extractdata = patch.extract(ui, hunk)
873 873 tmpname = extractdata.get('filename')
874 874 message = extractdata.get('message')
875 875 user = extractdata.get('user')
876 876 date = extractdata.get('date')
877 877 branch = extractdata.get('branch')
878 878 nodeid = extractdata.get('nodeid')
879 879 p1 = extractdata.get('p1')
880 880 p2 = extractdata.get('p2')
881 881
882 882 update = not opts.get('bypass')
883 883 strip = opts["strip"]
884 884 prefix = opts["prefix"]
885 885 sim = float(opts.get('similarity') or 0)
886 886 if not tmpname:
887 887 return (None, None, False)
888 888 msg = _('applied to working directory')
889 889
890 890 rejects = False
891 891
892 892 try:
893 893 cmdline_message = logmessage(ui, opts)
894 894 if cmdline_message:
895 895 # pickup the cmdline msg
896 896 message = cmdline_message
897 897 elif message:
898 898 # pickup the patch msg
899 899 message = message.strip()
900 900 else:
901 901 # launch the editor
902 902 message = None
903 903 ui.debug('message:\n%s\n' % message)
904 904
905 905 if len(parents) == 1:
906 906 parents.append(repo[nullid])
907 907 if opts.get('exact'):
908 908 if not nodeid or not p1:
909 909 raise error.Abort(_('not a Mercurial patch'))
910 910 p1 = repo[p1]
911 911 p2 = repo[p2 or nullid]
912 912 elif p2:
913 913 try:
914 914 p1 = repo[p1]
915 915 p2 = repo[p2]
916 916 # Without any options, consider p2 only if the
917 917 # patch is being applied on top of the recorded
918 918 # first parent.
919 919 if p1 != parents[0]:
920 920 p1 = parents[0]
921 921 p2 = repo[nullid]
922 922 except error.RepoError:
923 923 p1, p2 = parents
924 924 if p2.node() == nullid:
925 925 ui.warn(_("warning: import the patch as a normal revision\n"
926 926 "(use --exact to import the patch as a merge)\n"))
927 927 else:
928 928 p1, p2 = parents
929 929
930 930 n = None
931 931 if update:
932 932 if p1 != parents[0]:
933 933 updatefunc(repo, p1.node())
934 934 if p2 != parents[1]:
935 935 repo.setparents(p1.node(), p2.node())
936 936
937 937 if opts.get('exact') or opts.get('import_branch'):
938 938 repo.dirstate.setbranch(branch or 'default')
939 939
940 940 partial = opts.get('partial', False)
941 941 files = set()
942 942 try:
943 943 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
944 944 files=files, eolmode=None, similarity=sim / 100.0)
945 945 except patch.PatchError as e:
946 946 if not partial:
947 947 raise error.Abort(str(e))
948 948 if partial:
949 949 rejects = True
950 950
951 951 files = list(files)
952 952 if opts.get('no_commit'):
953 953 if message:
954 954 msgs.append(message)
955 955 else:
956 956 if opts.get('exact') or p2:
957 957 # If you got here, you either use --force and know what
958 958 # you are doing or used --exact or a merge patch while
959 959 # being updated to its first parent.
960 960 m = None
961 961 else:
962 962 m = scmutil.matchfiles(repo, files or [])
963 963 editform = mergeeditform(repo[None], 'import.normal')
964 964 if opts.get('exact'):
965 965 editor = None
966 966 else:
967 967 editor = getcommiteditor(editform=editform, **opts)
968 968 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
969 969 extra = {}
970 970 for idfunc in extrapreimport:
971 971 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
972 972 try:
973 973 if partial:
974 974 repo.ui.setconfig('ui', 'allowemptycommit', True)
975 975 n = repo.commit(message, opts.get('user') or user,
976 976 opts.get('date') or date, match=m,
977 977 editor=editor, extra=extra)
978 978 for idfunc in extrapostimport:
979 979 extrapostimportmap[idfunc](repo[n])
980 980 finally:
981 981 repo.ui.restoreconfig(allowemptyback)
982 982 else:
983 983 if opts.get('exact') or opts.get('import_branch'):
984 984 branch = branch or 'default'
985 985 else:
986 986 branch = p1.branch()
987 987 store = patch.filestore()
988 988 try:
989 989 files = set()
990 990 try:
991 991 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
992 992 files, eolmode=None)
993 993 except patch.PatchError as e:
994 994 raise error.Abort(str(e))
995 995 if opts.get('exact'):
996 996 editor = None
997 997 else:
998 998 editor = getcommiteditor(editform='import.bypass')
999 999 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1000 1000 message,
1001 1001 opts.get('user') or user,
1002 1002 opts.get('date') or date,
1003 1003 branch, files, store,
1004 1004 editor=editor)
1005 1005 n = memctx.commit()
1006 1006 finally:
1007 1007 store.close()
1008 1008 if opts.get('exact') and opts.get('no_commit'):
1009 1009 # --exact with --no-commit is still useful in that it does merge
1010 1010 # and branch bits
1011 1011 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1012 1012 elif opts.get('exact') and hex(n) != nodeid:
1013 1013 raise error.Abort(_('patch is damaged or loses information'))
1014 1014 if n:
1015 1015 # i18n: refers to a short changeset id
1016 1016 msg = _('created %s') % short(n)
1017 1017 return (msg, n, rejects)
1018 1018 finally:
1019 1019 os.unlink(tmpname)
1020 1020
1021 1021 # facility to let extensions include additional data in an exported patch
1022 1022 # list of identifiers to be executed in order
1023 1023 extraexport = []
1024 1024 # mapping from identifier to actual export function
1025 1025 # function has to return a string to be added to the header or None
1026 1026 # it is given two arguments (sequencenumber, changectx)
1027 1027 extraexportmap = {}
1028 1028
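Mirroring the import hooks, the two names above let extensions append extra '# ...' lines to the header of each exported patch: export() below calls each registered function with (seqno, ctx) and writes any non-None return value. A minimal, hypothetical sketch:

    # hypothetical extension code, not part of cmdutil.py
    from mercurial import cmdutil

    def _phaseheader(seqno, ctx):
        # return a string to be added to the patch header, or None to skip
        return 'Phase: %s' % ctx.phasestr()

    cmdutil.extraexport.append('phaseheader')
    cmdutil.extraexportmap['phaseheader'] = _phaseheader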
1029 1029 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1030 1030 opts=None, match=None):
1031 1031 '''export changesets as hg patches.'''
1032 1032
1033 1033 total = len(revs)
1034 1034 revwidth = max([len(str(rev)) for rev in revs])
1035 1035 filemode = {}
1036 1036
1037 1037 def single(rev, seqno, fp):
1038 1038 ctx = repo[rev]
1039 1039 node = ctx.node()
1040 1040 parents = [p.node() for p in ctx.parents() if p]
1041 1041 branch = ctx.branch()
1042 1042 if switch_parent:
1043 1043 parents.reverse()
1044 1044
1045 1045 if parents:
1046 1046 prev = parents[0]
1047 1047 else:
1048 1048 prev = nullid
1049 1049
1050 1050 shouldclose = False
1051 1051 if not fp and len(template) > 0:
1052 1052 desc_lines = ctx.description().rstrip().split('\n')
1053 1053 desc = desc_lines[0] #Commit always has a first line.
1054 1054 fp = makefileobj(repo, template, node, desc=desc, total=total,
1055 1055 seqno=seqno, revwidth=revwidth, mode='wb',
1056 1056 modemap=filemode)
1057 if fp != template:
1058 shouldclose = True
1057 shouldclose = True
1059 1058 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1060 1059 repo.ui.note("%s\n" % fp.name)
1061 1060
1062 1061 if not fp:
1063 1062 write = repo.ui.write
1064 1063 else:
1065 1064 def write(s, **kw):
1066 1065 fp.write(s)
1067 1066
1068 1067 write("# HG changeset patch\n")
1069 1068 write("# User %s\n" % ctx.user())
1070 1069 write("# Date %d %d\n" % ctx.date())
1071 1070 write("# %s\n" % util.datestr(ctx.date()))
1072 1071 if branch and branch != 'default':
1073 1072 write("# Branch %s\n" % branch)
1074 1073 write("# Node ID %s\n" % hex(node))
1075 1074 write("# Parent %s\n" % hex(prev))
1076 1075 if len(parents) > 1:
1077 1076 write("# Parent %s\n" % hex(parents[1]))
1078 1077
1079 1078 for headerid in extraexport:
1080 1079 header = extraexportmap[headerid](seqno, ctx)
1081 1080 if header is not None:
1082 1081 write('# %s\n' % header)
1083 1082 write(ctx.description().rstrip())
1084 1083 write("\n\n")
1085 1084
1086 1085 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1087 1086 write(chunk, label=label)
1088 1087
1089 1088 if shouldclose:
1090 1089 fp.close()
1091 1090
1092 1091 for seqno, rev in enumerate(revs):
1093 1092 single(rev, seqno + 1, fp)
1094 1093
1095 1094 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1096 1095 changes=None, stat=False, fp=None, prefix='',
1097 1096 root='', listsubrepos=False):
1098 1097 '''show diff or diffstat.'''
1099 1098 if fp is None:
1100 1099 write = ui.write
1101 1100 else:
1102 1101 def write(s, **kw):
1103 1102 fp.write(s)
1104 1103
1105 1104 if root:
1106 1105 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1107 1106 else:
1108 1107 relroot = ''
1109 1108 if relroot != '':
1110 1109 # XXX relative roots currently don't work if the root is within a
1111 1110 # subrepo
1112 1111 uirelroot = match.uipath(relroot)
1113 1112 relroot += '/'
1114 1113 for matchroot in match.files():
1115 1114 if not matchroot.startswith(relroot):
1116 1115 ui.warn(_('warning: %s not inside relative root %s\n') % (
1117 1116 match.uipath(matchroot), uirelroot))
1118 1117
1119 1118 if stat:
1120 1119 diffopts = diffopts.copy(context=0)
1121 1120 width = 80
1122 1121 if not ui.plain():
1123 1122 width = ui.termwidth()
1124 1123 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1125 1124 prefix=prefix, relroot=relroot)
1126 1125 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1127 1126 width=width,
1128 1127 git=diffopts.git):
1129 1128 write(chunk, label=label)
1130 1129 else:
1131 1130 for chunk, label in patch.diffui(repo, node1, node2, match,
1132 1131 changes, diffopts, prefix=prefix,
1133 1132 relroot=relroot):
1134 1133 write(chunk, label=label)
1135 1134
1136 1135 if listsubrepos:
1137 1136 ctx1 = repo[node1]
1138 1137 ctx2 = repo[node2]
1139 1138 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1140 1139 tempnode2 = node2
1141 1140 try:
1142 1141 if node2 is not None:
1143 1142 tempnode2 = ctx2.substate[subpath][1]
1144 1143 except KeyError:
1145 1144 # A subrepo that existed in node1 was deleted between node1 and
1146 1145 # node2 (inclusive). Thus, ctx2's substate won't contain that
1147 1146 # subpath. The best we can do is to ignore it.
1148 1147 tempnode2 = None
1149 1148 submatch = matchmod.narrowmatcher(subpath, match)
1150 1149 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1151 1150 stat=stat, fp=fp, prefix=prefix)
1152 1151
1153 1152 class changeset_printer(object):
1154 1153 '''show changeset information when templating not requested.'''
1155 1154
1156 1155 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1157 1156 self.ui = ui
1158 1157 self.repo = repo
1159 1158 self.buffered = buffered
1160 1159 self.matchfn = matchfn
1161 1160 self.diffopts = diffopts
1162 1161 self.header = {}
1163 1162 self.hunk = {}
1164 1163 self.lastheader = None
1165 1164 self.footer = None
1166 1165
1167 1166 def flush(self, ctx):
1168 1167 rev = ctx.rev()
1169 1168 if rev in self.header:
1170 1169 h = self.header[rev]
1171 1170 if h != self.lastheader:
1172 1171 self.lastheader = h
1173 1172 self.ui.write(h)
1174 1173 del self.header[rev]
1175 1174 if rev in self.hunk:
1176 1175 self.ui.write(self.hunk[rev])
1177 1176 del self.hunk[rev]
1178 1177 return 1
1179 1178 return 0
1180 1179
1181 1180 def close(self):
1182 1181 if self.footer:
1183 1182 self.ui.write(self.footer)
1184 1183
1185 1184 def show(self, ctx, copies=None, matchfn=None, **props):
1186 1185 if self.buffered:
1187 1186 self.ui.pushbuffer(labeled=True)
1188 1187 self._show(ctx, copies, matchfn, props)
1189 1188 self.hunk[ctx.rev()] = self.ui.popbuffer()
1190 1189 else:
1191 1190 self._show(ctx, copies, matchfn, props)
1192 1191
1193 1192 def _show(self, ctx, copies, matchfn, props):
1194 1193 '''show a single changeset or file revision'''
1195 1194 changenode = ctx.node()
1196 1195 rev = ctx.rev()
1197 1196 if self.ui.debugflag:
1198 1197 hexfunc = hex
1199 1198 else:
1200 1199 hexfunc = short
1201 1200 # as of now, wctx.node() and wctx.rev() return None, but we want to
1202 1201 # show the same values as {node} and {rev} templatekw
1203 1202 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1204 1203
1205 1204 if self.ui.quiet:
1206 1205 self.ui.write("%d:%s\n" % revnode, label='log.node')
1207 1206 return
1208 1207
1209 1208 date = util.datestr(ctx.date())
1210 1209
1211 1210 # i18n: column positioning for "hg log"
1212 1211 self.ui.write(_("changeset: %d:%s\n") % revnode,
1213 1212 label='log.changeset changeset.%s' % ctx.phasestr())
1214 1213
1215 1214 # branches are shown first before any other names due to backwards
1216 1215 # compatibility
1217 1216 branch = ctx.branch()
1218 1217 # don't show the default branch name
1219 1218 if branch != 'default':
1220 1219 # i18n: column positioning for "hg log"
1221 1220 self.ui.write(_("branch: %s\n") % branch,
1222 1221 label='log.branch')
1223 1222
1224 1223 for name, ns in self.repo.names.iteritems():
1225 1224 # branches has special logic already handled above, so here we just
1226 1225 # skip it
1227 1226 if name == 'branches':
1228 1227 continue
1229 1228 # we will use the templatename as the color name since those two
1230 1229 # should be the same
1231 1230 for name in ns.names(self.repo, changenode):
1232 1231 self.ui.write(ns.logfmt % name,
1233 1232 label='log.%s' % ns.colorname)
1234 1233 if self.ui.debugflag:
1235 1234 # i18n: column positioning for "hg log"
1236 1235 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1237 1236 label='log.phase')
1238 1237 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1239 1238 label = 'log.parent changeset.%s' % pctx.phasestr()
1240 1239 # i18n: column positioning for "hg log"
1241 1240 self.ui.write(_("parent: %d:%s\n")
1242 1241 % (pctx.rev(), hexfunc(pctx.node())),
1243 1242 label=label)
1244 1243
1245 1244 if self.ui.debugflag and rev is not None:
1246 1245 mnode = ctx.manifestnode()
1247 1246 # i18n: column positioning for "hg log"
1248 1247 self.ui.write(_("manifest: %d:%s\n") %
1249 1248 (self.repo.manifest.rev(mnode), hex(mnode)),
1250 1249 label='ui.debug log.manifest')
1251 1250 # i18n: column positioning for "hg log"
1252 1251 self.ui.write(_("user: %s\n") % ctx.user(),
1253 1252 label='log.user')
1254 1253 # i18n: column positioning for "hg log"
1255 1254 self.ui.write(_("date: %s\n") % date,
1256 1255 label='log.date')
1257 1256
1258 1257 if self.ui.debugflag:
1259 1258 files = ctx.p1().status(ctx)[:3]
1260 1259 for key, value in zip([# i18n: column positioning for "hg log"
1261 1260 _("files:"),
1262 1261 # i18n: column positioning for "hg log"
1263 1262 _("files+:"),
1264 1263 # i18n: column positioning for "hg log"
1265 1264 _("files-:")], files):
1266 1265 if value:
1267 1266 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1268 1267 label='ui.debug log.files')
1269 1268 elif ctx.files() and self.ui.verbose:
1270 1269 # i18n: column positioning for "hg log"
1271 1270 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1272 1271 label='ui.note log.files')
1273 1272 if copies and self.ui.verbose:
1274 1273 copies = ['%s (%s)' % c for c in copies]
1275 1274 # i18n: column positioning for "hg log"
1276 1275 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1277 1276 label='ui.note log.copies')
1278 1277
1279 1278 extra = ctx.extra()
1280 1279 if extra and self.ui.debugflag:
1281 1280 for key, value in sorted(extra.items()):
1282 1281 # i18n: column positioning for "hg log"
1283 1282 self.ui.write(_("extra: %s=%s\n")
1284 1283 % (key, value.encode('string_escape')),
1285 1284 label='ui.debug log.extra')
1286 1285
1287 1286 description = ctx.description().strip()
1288 1287 if description:
1289 1288 if self.ui.verbose:
1290 1289 self.ui.write(_("description:\n"),
1291 1290 label='ui.note log.description')
1292 1291 self.ui.write(description,
1293 1292 label='ui.note log.description')
1294 1293 self.ui.write("\n\n")
1295 1294 else:
1296 1295 # i18n: column positioning for "hg log"
1297 1296 self.ui.write(_("summary: %s\n") %
1298 1297 description.splitlines()[0],
1299 1298 label='log.summary')
1300 1299 self.ui.write("\n")
1301 1300
1302 1301 self.showpatch(ctx, matchfn)
1303 1302
1304 1303 def showpatch(self, ctx, matchfn):
1305 1304 if not matchfn:
1306 1305 matchfn = self.matchfn
1307 1306 if matchfn:
1308 1307 stat = self.diffopts.get('stat')
1309 1308 diff = self.diffopts.get('patch')
1310 1309 diffopts = patch.diffallopts(self.ui, self.diffopts)
1311 1310 node = ctx.node()
1312 1311 prev = ctx.p1()
1313 1312 if stat:
1314 1313 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1315 1314 match=matchfn, stat=True)
1316 1315 if diff:
1317 1316 if stat:
1318 1317 self.ui.write("\n")
1319 1318 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1320 1319 match=matchfn, stat=False)
1321 1320 self.ui.write("\n")
1322 1321
1323 1322 class jsonchangeset(changeset_printer):
1324 1323 '''format changeset information.'''
1325 1324
1326 1325 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1327 1326 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1328 1327 self.cache = {}
1329 1328 self._first = True
1330 1329
1331 1330 def close(self):
1332 1331 if not self._first:
1333 1332 self.ui.write("\n]\n")
1334 1333 else:
1335 1334 self.ui.write("[]\n")
1336 1335
1337 1336 def _show(self, ctx, copies, matchfn, props):
1338 1337 '''show a single changeset or file revision'''
1339 1338 rev = ctx.rev()
1340 1339 if rev is None:
1341 1340 jrev = jnode = 'null'
1342 1341 else:
1343 1342 jrev = str(rev)
1344 1343 jnode = '"%s"' % hex(ctx.node())
1345 1344 j = encoding.jsonescape
1346 1345
1347 1346 if self._first:
1348 1347 self.ui.write("[\n {")
1349 1348 self._first = False
1350 1349 else:
1351 1350 self.ui.write(",\n {")
1352 1351
1353 1352 if self.ui.quiet:
1354 1353 self.ui.write('\n "rev": %s' % jrev)
1355 1354 self.ui.write(',\n "node": %s' % jnode)
1356 1355 self.ui.write('\n }')
1357 1356 return
1358 1357
1359 1358 self.ui.write('\n "rev": %s' % jrev)
1360 1359 self.ui.write(',\n "node": %s' % jnode)
1361 1360 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1362 1361 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1363 1362 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1364 1363 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1365 1364 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1366 1365
1367 1366 self.ui.write(',\n "bookmarks": [%s]' %
1368 1367 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1369 1368 self.ui.write(',\n "tags": [%s]' %
1370 1369 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1371 1370 self.ui.write(',\n "parents": [%s]' %
1372 1371 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1373 1372
1374 1373 if self.ui.debugflag:
1375 1374 if rev is None:
1376 1375 jmanifestnode = 'null'
1377 1376 else:
1378 1377 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1379 1378 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1380 1379
1381 1380 self.ui.write(',\n "extra": {%s}' %
1382 1381 ", ".join('"%s": "%s"' % (j(k), j(v))
1383 1382 for k, v in ctx.extra().items()))
1384 1383
1385 1384 files = ctx.p1().status(ctx)
1386 1385 self.ui.write(',\n "modified": [%s]' %
1387 1386 ", ".join('"%s"' % j(f) for f in files[0]))
1388 1387 self.ui.write(',\n "added": [%s]' %
1389 1388 ", ".join('"%s"' % j(f) for f in files[1]))
1390 1389 self.ui.write(',\n "removed": [%s]' %
1391 1390 ", ".join('"%s"' % j(f) for f in files[2]))
1392 1391
1393 1392 elif self.ui.verbose:
1394 1393 self.ui.write(',\n "files": [%s]' %
1395 1394 ", ".join('"%s"' % j(f) for f in ctx.files()))
1396 1395
1397 1396 if copies:
1398 1397 self.ui.write(',\n "copies": {%s}' %
1399 1398 ", ".join('"%s": "%s"' % (j(k), j(v))
1400 1399 for k, v in copies))
1401 1400
1402 1401 matchfn = self.matchfn
1403 1402 if matchfn:
1404 1403 stat = self.diffopts.get('stat')
1405 1404 diff = self.diffopts.get('patch')
1406 1405 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1407 1406 node, prev = ctx.node(), ctx.p1().node()
1408 1407 if stat:
1409 1408 self.ui.pushbuffer()
1410 1409 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1411 1410 match=matchfn, stat=True)
1412 1411 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1413 1412 if diff:
1414 1413 self.ui.pushbuffer()
1415 1414 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1416 1415 match=matchfn, stat=False)
1417 1416 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1418 1417
1419 1418 self.ui.write("\n }")
1420 1419
1421 1420 class changeset_templater(changeset_printer):
1422 1421 '''format changeset information.'''
1423 1422
1424 1423 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1425 1424 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1426 1425 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1427 1426 defaulttempl = {
1428 1427 'parent': '{rev}:{node|formatnode} ',
1429 1428 'manifest': '{rev}:{node|formatnode}',
1430 1429 'file_copy': '{name} ({source})',
1431 1430 'extra': '{key}={value|stringescape}'
1432 1431 }
1433 1432 # filecopy is preserved for compatibility reasons
1434 1433 defaulttempl['filecopy'] = defaulttempl['file_copy']
1435 1434 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1436 1435 cache=defaulttempl)
1437 1436 if tmpl:
1438 1437 self.t.cache['changeset'] = tmpl
1439 1438
1440 1439 self.cache = {}
1441 1440
1442 1441 # find correct templates for current mode
1443 1442 tmplmodes = [
1444 1443 (True, None),
1445 1444 (self.ui.verbose, 'verbose'),
1446 1445 (self.ui.quiet, 'quiet'),
1447 1446 (self.ui.debugflag, 'debug'),
1448 1447 ]
1449 1448
1450 1449 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1451 1450 'docheader': '', 'docfooter': ''}
1452 1451 for mode, postfix in tmplmodes:
1453 1452 for t in self._parts:
1454 1453 cur = t
1455 1454 if postfix:
1456 1455 cur += "_" + postfix
1457 1456 if mode and cur in self.t:
1458 1457 self._parts[t] = cur
1459 1458
1460 1459 if self._parts['docheader']:
1461 1460 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1462 1461
1463 1462 def close(self):
1464 1463 if self._parts['docfooter']:
1465 1464 if not self.footer:
1466 1465 self.footer = ""
1467 1466 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1468 1467 return super(changeset_templater, self).close()
1469 1468
1470 1469 def _show(self, ctx, copies, matchfn, props):
1471 1470 '''show a single changeset or file revision'''
1472 1471 props = props.copy()
1473 1472 props.update(templatekw.keywords)
1474 1473 props['templ'] = self.t
1475 1474 props['ctx'] = ctx
1476 1475 props['repo'] = self.repo
1477 1476 props['revcache'] = {'copies': copies}
1478 1477 props['cache'] = self.cache
1479 1478
1480 1479 try:
1481 1480 # write header
1482 1481 if self._parts['header']:
1483 1482 h = templater.stringify(self.t(self._parts['header'], **props))
1484 1483 if self.buffered:
1485 1484 self.header[ctx.rev()] = h
1486 1485 else:
1487 1486 if self.lastheader != h:
1488 1487 self.lastheader = h
1489 1488 self.ui.write(h)
1490 1489
1491 1490 # write changeset metadata, then patch if requested
1492 1491 key = self._parts['changeset']
1493 1492 self.ui.write(templater.stringify(self.t(key, **props)))
1494 1493 self.showpatch(ctx, matchfn)
1495 1494
1496 1495 if self._parts['footer']:
1497 1496 if not self.footer:
1498 1497 self.footer = templater.stringify(
1499 1498 self.t(self._parts['footer'], **props))
1500 1499 except KeyError as inst:
1501 1500 msg = _("%s: no key named '%s'")
1502 1501 raise error.Abort(msg % (self.t.mapfile, inst.args[0]))
1503 1502 except SyntaxError as inst:
1504 1503 raise error.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1505 1504
1506 1505 def gettemplate(ui, tmpl, style):
1507 1506 """
1508 1507 Find the template matching the given template spec or style.
1509 1508 """
1510 1509
1511 1510 # ui settings
1512 1511 if not tmpl and not style: # template are stronger than style
1513 1512 tmpl = ui.config('ui', 'logtemplate')
1514 1513 if tmpl:
1515 1514 try:
1516 1515 tmpl = templater.unquotestring(tmpl)
1517 1516 except SyntaxError:
1518 1517 pass
1519 1518 return tmpl, None
1520 1519 else:
1521 1520 style = util.expandpath(ui.config('ui', 'style', ''))
1522 1521
1523 1522 if not tmpl and style:
1524 1523 mapfile = style
1525 1524 if not os.path.split(mapfile)[0]:
1526 1525 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1527 1526 or templater.templatepath(mapfile))
1528 1527 if mapname:
1529 1528 mapfile = mapname
1530 1529 return None, mapfile
1531 1530
1532 1531 if not tmpl:
1533 1532 return None, None
1534 1533
1535 1534 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1536 1535
1537 1536 def show_changeset(ui, repo, opts, buffered=False):
1538 1537 """show one changeset using template or regular display.
1539 1538
1540 1539 Display format will be the first non-empty hit of:
1541 1540 1. option 'template'
1542 1541 2. option 'style'
1543 1542 3. [ui] setting 'logtemplate'
1544 1543 4. [ui] setting 'style'
1545 1544 If all of these values are either the unset or the empty string,
1546 1545 regular display via changeset_printer() is done.
1547 1546 """
1548 1547 # options
1549 1548 matchfn = None
1550 1549 if opts.get('patch') or opts.get('stat'):
1551 1550 matchfn = scmutil.matchall(repo)
1552 1551
1553 1552 if opts.get('template') == 'json':
1554 1553 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1555 1554
1556 1555 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1557 1556
1558 1557 if not tmpl and not mapfile:
1559 1558 return changeset_printer(ui, repo, matchfn, opts, buffered)
1560 1559
1561 1560 try:
1562 1561 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1563 1562 buffered)
1564 1563 except SyntaxError as inst:
1565 1564 raise error.Abort(inst.args[0])
1566 1565 return t
1567 1566
1568 1567 def showmarker(ui, marker):
1569 1568 """utility function to display obsolescence marker in a readable way
1570 1569
1571 1570 To be used by debug function."""
1572 1571 ui.write(hex(marker.precnode()))
1573 1572 for repl in marker.succnodes():
1574 1573 ui.write(' ')
1575 1574 ui.write(hex(repl))
1576 1575 ui.write(' %X ' % marker.flags())
1577 1576 parents = marker.parentnodes()
1578 1577 if parents is not None:
1579 1578 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1580 1579 ui.write('(%s) ' % util.datestr(marker.date()))
1581 1580 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1582 1581 sorted(marker.metadata().items())
1583 1582 if t[0] != 'date')))
1584 1583 ui.write('\n')
1585 1584
1586 1585 def finddate(ui, repo, date):
1587 1586 """Find the tipmost changeset that matches the given date spec"""
1588 1587
1589 1588 df = util.matchdate(date)
1590 1589 m = scmutil.matchall(repo)
1591 1590 results = {}
1592 1591
1593 1592 def prep(ctx, fns):
1594 1593 d = ctx.date()
1595 1594 if df(d[0]):
1596 1595 results[ctx.rev()] = d
1597 1596
1598 1597 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1599 1598 rev = ctx.rev()
1600 1599 if rev in results:
1601 1600 ui.status(_("found revision %s from %s\n") %
1602 1601 (rev, util.datestr(results[rev])))
1603 1602 return str(rev)
1604 1603
1605 1604 raise error.Abort(_("revision matching date not found"))
1606 1605
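# Example (illustrative sketch): the spec handed to finddate() is parsed by
# util.matchdate, so commands taking --date (e.g. "hg update --date
# '<2015-11-01'") may use range forms such as "<DATE", ">DATE" or
# "DATE to DATE"; finddate() walks the changelog and returns the tipmost
# matching revision number as a string, or aborts when nothing matches.
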
1607 1606 def increasingwindows(windowsize=8, sizelimit=512):
1608 1607 while True:
1609 1608 yield windowsize
1610 1609 if windowsize < sizelimit:
1611 1610 windowsize *= 2
1612 1611
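# Example (illustrative sketch): with the default arguments the generator
# doubles the window until it reaches the size limit and then keeps yielding
# the limit forever:
#
#   >>> import itertools
#   >>> list(itertools.islice(increasingwindows(), 9))
#   [8, 16, 32, 64, 128, 256, 512, 512, 512]
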
1613 1612 class FileWalkError(Exception):
1614 1613 pass
1615 1614
1616 1615 def walkfilerevs(repo, match, follow, revs, fncache):
1617 1616 '''Walks the file history for the matched files.
1618 1617
1619 1618 Returns the changeset revs that are involved in the file history.
1620 1619
1621 1620 Throws FileWalkError if the file history can't be walked using
1622 1621 filelogs alone.
1623 1622 '''
1624 1623 wanted = set()
1625 1624 copies = []
1626 1625 minrev, maxrev = min(revs), max(revs)
1627 1626 def filerevgen(filelog, last):
1628 1627 """
1629 1628 Only files, no patterns. Check the history of each file.
1630 1629
1631 1630 Examines filelog entries within minrev, maxrev linkrev range
1632 1631 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1633 1632 tuples in backwards order
1634 1633 """
1635 1634 cl_count = len(repo)
1636 1635 revs = []
1637 1636 for j in xrange(0, last + 1):
1638 1637 linkrev = filelog.linkrev(j)
1639 1638 if linkrev < minrev:
1640 1639 continue
1641 1640 # only yield rev for which we have the changelog, it can
1642 1641 # happen while doing "hg log" during a pull or commit
1643 1642 if linkrev >= cl_count:
1644 1643 break
1645 1644
1646 1645 parentlinkrevs = []
1647 1646 for p in filelog.parentrevs(j):
1648 1647 if p != nullrev:
1649 1648 parentlinkrevs.append(filelog.linkrev(p))
1650 1649 n = filelog.node(j)
1651 1650 revs.append((linkrev, parentlinkrevs,
1652 1651 follow and filelog.renamed(n)))
1653 1652
1654 1653 return reversed(revs)
1655 1654 def iterfiles():
1656 1655 pctx = repo['.']
1657 1656 for filename in match.files():
1658 1657 if follow:
1659 1658 if filename not in pctx:
1660 1659 raise error.Abort(_('cannot follow file not in parent '
1661 1660 'revision: "%s"') % filename)
1662 1661 yield filename, pctx[filename].filenode()
1663 1662 else:
1664 1663 yield filename, None
1665 1664 for filename_node in copies:
1666 1665 yield filename_node
1667 1666
1668 1667 for file_, node in iterfiles():
1669 1668 filelog = repo.file(file_)
1670 1669 if not len(filelog):
1671 1670 if node is None:
1672 1671 # A zero count may be a directory or deleted file, so
1673 1672 # try to find matching entries on the slow path.
1674 1673 if follow:
1675 1674 raise error.Abort(
1676 1675 _('cannot follow nonexistent file: "%s"') % file_)
1677 1676 raise FileWalkError("Cannot walk via filelog")
1678 1677 else:
1679 1678 continue
1680 1679
1681 1680 if node is None:
1682 1681 last = len(filelog) - 1
1683 1682 else:
1684 1683 last = filelog.rev(node)
1685 1684
1686 1685 # keep track of all ancestors of the file
1687 1686 ancestors = set([filelog.linkrev(last)])
1688 1687
1689 1688 # iterate from latest to oldest revision
1690 1689 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1691 1690 if not follow:
1692 1691 if rev > maxrev:
1693 1692 continue
1694 1693 else:
1695 1694 # Note that last might not be the first interesting
1696 1695 # rev to us:
1697 1696 # if the file has been changed after maxrev, we'll
1698 1697 # have linkrev(last) > maxrev, and we still need
1699 1698 # to explore the file graph
1700 1699 if rev not in ancestors:
1701 1700 continue
1702 1701 # XXX insert 1327 fix here
1703 1702 if flparentlinkrevs:
1704 1703 ancestors.update(flparentlinkrevs)
1705 1704
1706 1705 fncache.setdefault(rev, []).append(file_)
1707 1706 wanted.add(rev)
1708 1707 if copied:
1709 1708 copies.append(copied)
1710 1709
1711 1710 return wanted
1712 1711
1713 1712 class _followfilter(object):
1714 1713 def __init__(self, repo, onlyfirst=False):
1715 1714 self.repo = repo
1716 1715 self.startrev = nullrev
1717 1716 self.roots = set()
1718 1717 self.onlyfirst = onlyfirst
1719 1718
1720 1719 def match(self, rev):
1721 1720 def realparents(rev):
1722 1721 if self.onlyfirst:
1723 1722 return self.repo.changelog.parentrevs(rev)[0:1]
1724 1723 else:
1725 1724 return filter(lambda x: x != nullrev,
1726 1725 self.repo.changelog.parentrevs(rev))
1727 1726
1728 1727 if self.startrev == nullrev:
1729 1728 self.startrev = rev
1730 1729 return True
1731 1730
1732 1731 if rev > self.startrev:
1733 1732 # forward: all descendants
1734 1733 if not self.roots:
1735 1734 self.roots.add(self.startrev)
1736 1735 for parent in realparents(rev):
1737 1736 if parent in self.roots:
1738 1737 self.roots.add(rev)
1739 1738 return True
1740 1739 else:
1741 1740 # backwards: all parents
1742 1741 if not self.roots:
1743 1742 self.roots.update(realparents(self.startrev))
1744 1743 if rev in self.roots:
1745 1744 self.roots.remove(rev)
1746 1745 self.roots.update(realparents(rev))
1747 1746 return True
1748 1747
1749 1748 return False
1750 1749
1751 1750 def walkchangerevs(repo, match, opts, prepare):
1752 1751 '''Iterate over files and the revs in which they changed.
1753 1752
1754 1753 Callers most commonly need to iterate backwards over the history
1755 1754 in which they are interested. Doing so has awful (quadratic-looking)
1756 1755 performance, so we use iterators in a "windowed" way.
1757 1756
1758 1757 We walk a window of revisions in the desired order. Within the
1759 1758 window, we first walk forwards to gather data, then in the desired
1760 1759 order (usually backwards) to display it.
1761 1760
1762 1761 This function returns an iterator yielding contexts. Before
1763 1762 yielding each context, the iterator will first call the prepare
1764 1763 function on each context in the window in forward order.'''
1765 1764
1766 1765 follow = opts.get('follow') or opts.get('follow_first')
1767 1766 revs = _logrevs(repo, opts)
1768 1767 if not revs:
1769 1768 return []
1770 1769 wanted = set()
1771 1770 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1772 1771 opts.get('removed'))
1773 1772 fncache = {}
1774 1773 change = repo.changectx
1775 1774
1776 1775 # First step is to fill wanted, the set of revisions that we want to yield.
1777 1776 # When it does not induce extra cost, we also fill fncache for revisions in
1778 1777 # wanted: a cache of filenames that were changed (ctx.files()) and that
1779 1778 # match the file filtering conditions.
1780 1779
1781 1780 if match.always():
1782 1781 # No files, no patterns. Display all revs.
1783 1782 wanted = revs
1784 1783 elif not slowpath:
1785 1784 # We only have to read through the filelog to find wanted revisions
1786 1785
1787 1786 try:
1788 1787 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1789 1788 except FileWalkError:
1790 1789 slowpath = True
1791 1790
1792 1791 # We decided to fall back to the slowpath because at least one
1793 1792 # of the paths was not a file. Check to see if at least one of them
1794 1793 # existed in history, otherwise simply return
1795 1794 for path in match.files():
1796 1795 if path == '.' or path in repo.store:
1797 1796 break
1798 1797 else:
1799 1798 return []
1800 1799
1801 1800 if slowpath:
1802 1801 # We have to read the changelog to match filenames against
1803 1802 # changed files
1804 1803
1805 1804 if follow:
1806 1805 raise error.Abort(_('can only follow copies/renames for explicit '
1807 1806 'filenames'))
1808 1807
1809 1808 # The slow path checks files modified in every changeset.
1810 1809 # This is really slow on large repos, so compute the set lazily.
1811 1810 class lazywantedset(object):
1812 1811 def __init__(self):
1813 1812 self.set = set()
1814 1813 self.revs = set(revs)
1815 1814
1816 1815 # No need to worry about locality here because it will be accessed
1817 1816 # in the same order as the increasing window below.
1818 1817 def __contains__(self, value):
1819 1818 if value in self.set:
1820 1819 return True
1821 1820 elif value not in self.revs:
1822 1821 return False
1823 1822 else:
1824 1823 self.revs.discard(value)
1825 1824 ctx = change(value)
1826 1825 matches = filter(match, ctx.files())
1827 1826 if matches:
1828 1827 fncache[value] = matches
1829 1828 self.set.add(value)
1830 1829 return True
1831 1830 return False
1832 1831
1833 1832 def discard(self, value):
1834 1833 self.revs.discard(value)
1835 1834 self.set.discard(value)
1836 1835
1837 1836 wanted = lazywantedset()
1838 1837
1839 1838 # it might be worthwhile to do this in the iterator if the rev range
1840 1839 # is descending and the prune args are all within that range
1841 1840 for rev in opts.get('prune', ()):
1842 1841 rev = repo[rev].rev()
1843 1842 ff = _followfilter(repo)
1844 1843 stop = min(revs[0], revs[-1])
1845 1844 for x in xrange(rev, stop - 1, -1):
1846 1845 if ff.match(x):
1847 1846 wanted = wanted - [x]
1848 1847
1849 1848 # Now that wanted is correctly initialized, we can iterate over the
1850 1849 # revision range, yielding only revisions in wanted.
1851 1850 def iterate():
1852 1851 if follow and match.always():
1853 1852 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1854 1853 def want(rev):
1855 1854 return ff.match(rev) and rev in wanted
1856 1855 else:
1857 1856 def want(rev):
1858 1857 return rev in wanted
1859 1858
1860 1859 it = iter(revs)
1861 1860 stopiteration = False
1862 1861 for windowsize in increasingwindows():
1863 1862 nrevs = []
1864 1863 for i in xrange(windowsize):
1865 1864 rev = next(it, None)
1866 1865 if rev is None:
1867 1866 stopiteration = True
1868 1867 break
1869 1868 elif want(rev):
1870 1869 nrevs.append(rev)
1871 1870 for rev in sorted(nrevs):
1872 1871 fns = fncache.get(rev)
1873 1872 ctx = change(rev)
1874 1873 if not fns:
1875 1874 def fns_generator():
1876 1875 for f in ctx.files():
1877 1876 if match(f):
1878 1877 yield f
1879 1878 fns = fns_generator()
1880 1879 prepare(ctx, fns)
1881 1880 for rev in nrevs:
1882 1881 yield change(rev)
1883 1882
1884 1883 if stopiteration:
1885 1884 break
1886 1885
1887 1886 return iterate()
1888 1887
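# Example (illustrative sketch, hypothetical caller): the "prepare" callback
# is invoked in forward order for every context in a window before the
# contexts themselves are yielded in the requested (usually reverse) order,
# as finddate() above does. A minimal caller could look like:
#
#   def printfirstlines(ui, repo):
#       m = scmutil.matchall(repo)
#       def prep(ctx, fns):
#           pass  # gather per-window data here if needed
#       for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
#           ui.write("%d: %s\n" % (ctx.rev(), ctx.description().split('\n')[0]))
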
1889 1888 def _makefollowlogfilematcher(repo, files, followfirst):
1890 1889 # When displaying a revision with --patch --follow FILE, we have
1891 1890 # to know which file of the revision must be diffed. With
1892 1891 # --follow, we want the names of the ancestors of FILE in the
1893 1892 # revision, stored in "fcache". "fcache" is populated by
1894 1893 # reproducing the graph traversal already done by --follow revset
1895 1894 # and relating linkrevs to file names (which is not "correct" but
1896 1895 # good enough).
1897 1896 fcache = {}
1898 1897 fcacheready = [False]
1899 1898 pctx = repo['.']
1900 1899
1901 1900 def populate():
1902 1901 for fn in files:
1903 1902 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1904 1903 for c in i:
1905 1904 fcache.setdefault(c.linkrev(), set()).add(c.path())
1906 1905
1907 1906 def filematcher(rev):
1908 1907 if not fcacheready[0]:
1909 1908 # Lazy initialization
1910 1909 fcacheready[0] = True
1911 1910 populate()
1912 1911 return scmutil.matchfiles(repo, fcache.get(rev, []))
1913 1912
1914 1913 return filematcher
1915 1914
1916 1915 def _makenofollowlogfilematcher(repo, pats, opts):
1917 1916 '''hook for extensions to override the filematcher for non-follow cases'''
1918 1917 return None
1919 1918
1920 1919 def _makelogrevset(repo, pats, opts, revs):
1921 1920 """Return (expr, filematcher) where expr is a revset string built
1922 1921 from log options and file patterns or None. If --stat or --patch
1923 1922 are not passed filematcher is None. Otherwise it is a callable
1924 1923 taking a revision number and returning a match objects filtering
1925 1924 the files to be detailed when displaying the revision.
1926 1925 """
1927 1926 opt2revset = {
1928 1927 'no_merges': ('not merge()', None),
1929 1928 'only_merges': ('merge()', None),
1930 1929 '_ancestors': ('ancestors(%(val)s)', None),
1931 1930 '_fancestors': ('_firstancestors(%(val)s)', None),
1932 1931 '_descendants': ('descendants(%(val)s)', None),
1933 1932 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1934 1933 '_matchfiles': ('_matchfiles(%(val)s)', None),
1935 1934 'date': ('date(%(val)r)', None),
1936 1935 'branch': ('branch(%(val)r)', ' or '),
1937 1936 '_patslog': ('filelog(%(val)r)', ' or '),
1938 1937 '_patsfollow': ('follow(%(val)r)', ' or '),
1939 1938 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1940 1939 'keyword': ('keyword(%(val)r)', ' or '),
1941 1940 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1942 1941 'user': ('user(%(val)r)', ' or '),
1943 1942 }
1944 1943
1945 1944 opts = dict(opts)
1946 1945 # follow or not follow?
1947 1946 follow = opts.get('follow') or opts.get('follow_first')
1948 1947 if opts.get('follow_first'):
1949 1948 followfirst = 1
1950 1949 else:
1951 1950 followfirst = 0
1952 1951 # --follow with FILE behavior depends on revs...
1953 1952 it = iter(revs)
1954 1953 startrev = it.next()
1955 1954 followdescendants = startrev < next(it, startrev)
1956 1955
1957 1956 # branch and only_branch are really aliases and must be handled at
1958 1957 # the same time
1959 1958 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1960 1959 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1961 1960 # pats/include/exclude are passed to match.match() directly in
1962 1961 # _matchfiles() revset but walkchangerevs() builds its matcher with
1963 1962 # scmutil.match(). The difference is input pats are globbed on
1964 1963 # platforms without shell expansion (windows).
1965 1964 wctx = repo[None]
1966 1965 match, pats = scmutil.matchandpats(wctx, pats, opts)
1967 1966 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1968 1967 opts.get('removed'))
1969 1968 if not slowpath:
1970 1969 for f in match.files():
1971 1970 if follow and f not in wctx:
1972 1971 # If the file exists, it may be a directory, so let it
1973 1972 # take the slow path.
1974 1973 if os.path.exists(repo.wjoin(f)):
1975 1974 slowpath = True
1976 1975 continue
1977 1976 else:
1978 1977 raise error.Abort(_('cannot follow file not in parent '
1979 1978 'revision: "%s"') % f)
1980 1979 filelog = repo.file(f)
1981 1980 if not filelog:
1982 1981 # A zero count may be a directory or deleted file, so
1983 1982 # try to find matching entries on the slow path.
1984 1983 if follow:
1985 1984 raise error.Abort(
1986 1985 _('cannot follow nonexistent file: "%s"') % f)
1987 1986 slowpath = True
1988 1987
1989 1988 # We decided to fall back to the slowpath because at least one
1990 1989 # of the paths was not a file. Check to see if at least one of them
1991 1990 # existed in history - in that case, we'll continue down the
1992 1991 # slowpath; otherwise, we can turn off the slowpath
1993 1992 if slowpath:
1994 1993 for path in match.files():
1995 1994 if path == '.' or path in repo.store:
1996 1995 break
1997 1996 else:
1998 1997 slowpath = False
1999 1998
2000 1999 fpats = ('_patsfollow', '_patsfollowfirst')
2001 2000 fnopats = (('_ancestors', '_fancestors'),
2002 2001 ('_descendants', '_fdescendants'))
2003 2002 if slowpath:
2004 2003 # See walkchangerevs() slow path.
2005 2004 #
2006 2005 # pats/include/exclude cannot be represented as separate
2007 2006 # revset expressions as their filtering logic applies at file
2008 2007 level. For instance "-I a -X b" matches a revision touching
2009 2008 # "a" and "b" while "file(a) and not file(b)" does
2010 2009 # not. Besides, filesets are evaluated against the working
2011 2010 # directory.
2012 2011 matchargs = ['r:', 'd:relpath']
2013 2012 for p in pats:
2014 2013 matchargs.append('p:' + p)
2015 2014 for p in opts.get('include', []):
2016 2015 matchargs.append('i:' + p)
2017 2016 for p in opts.get('exclude', []):
2018 2017 matchargs.append('x:' + p)
2019 2018 matchargs = ','.join(('%r' % p) for p in matchargs)
2020 2019 opts['_matchfiles'] = matchargs
2021 2020 if follow:
2022 2021 opts[fnopats[0][followfirst]] = '.'
2023 2022 else:
2024 2023 if follow:
2025 2024 if pats:
2026 2025 # follow() revset interprets its file argument as a
2027 2026 # manifest entry, so use match.files(), not pats.
2028 2027 opts[fpats[followfirst]] = list(match.files())
2029 2028 else:
2030 2029 op = fnopats[followdescendants][followfirst]
2031 2030 opts[op] = 'rev(%d)' % startrev
2032 2031 else:
2033 2032 opts['_patslog'] = list(pats)
2034 2033
2035 2034 filematcher = None
2036 2035 if opts.get('patch') or opts.get('stat'):
2037 2036 # When following files, track renames via a special matcher.
2038 2037 # If we're forced to take the slowpath it means we're following
2039 2038 # at least one pattern/directory, so don't bother with rename tracking.
2040 2039 if follow and not match.always() and not slowpath:
2041 2040 # _makefollowlogfilematcher expects its files argument to be
2042 2041 # relative to the repo root, so use match.files(), not pats.
2043 2042 filematcher = _makefollowlogfilematcher(repo, match.files(),
2044 2043 followfirst)
2045 2044 else:
2046 2045 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2047 2046 if filematcher is None:
2048 2047 filematcher = lambda rev: match
2049 2048
2050 2049 expr = []
2051 2050 for op, val in sorted(opts.iteritems()):
2052 2051 if not val:
2053 2052 continue
2054 2053 if op not in opt2revset:
2055 2054 continue
2056 2055 revop, andor = opt2revset[op]
2057 2056 if '%(val)' not in revop:
2058 2057 expr.append(revop)
2059 2058 else:
2060 2059 if not isinstance(val, list):
2061 2060 e = revop % {'val': val}
2062 2061 else:
2063 2062 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2064 2063 expr.append(e)
2065 2064
2066 2065 if expr:
2067 2066 expr = '(' + ' and '.join(expr) + ')'
2068 2067 else:
2069 2068 expr = None
2070 2069 return expr, filematcher
2071 2070
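# Example (illustrative sketch): for a fast-path invocation such as
# "hg log -k bug -u alice b.txt" the options above roughly translate into
#
#   ((filelog('b.txt')) and (keyword('bug')) and (user('alice')))
#
# i.e. each option is mapped through opt2revset, list values are joined with
# the per-option separator ("or") and the resulting terms are combined with
# "and" in sorted option order.
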
2072 2071 def _logrevs(repo, opts):
2073 2072 # Default --rev value depends on --follow but --follow behavior
2074 2073 # depends on revisions resolved from --rev...
2075 2074 follow = opts.get('follow') or opts.get('follow_first')
2076 2075 if opts.get('rev'):
2077 2076 revs = scmutil.revrange(repo, opts['rev'])
2078 2077 elif follow and repo.dirstate.p1() == nullid:
2079 2078 revs = revset.baseset()
2080 2079 elif follow:
2081 2080 revs = repo.revs('reverse(:.)')
2082 2081 else:
2083 2082 revs = revset.spanset(repo)
2084 2083 revs.reverse()
2085 2084 return revs
2086 2085
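# Example (illustrative sketch): without --rev, a plain "hg log" walks every
# revision in reverse order (the spanset branch above), while "hg log -f"
# restricts the walk to "reverse(:.)", i.e. every revision numbered up to
# the working directory parent, newest first.
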
2087 2086 def getgraphlogrevs(repo, pats, opts):
2088 2087 """Return (revs, expr, filematcher) where revs is an iterable of
2089 2088 revision numbers, expr is a revset string built from log options
2090 2089 and file patterns or None, and used to filter 'revs'. If --stat or
2091 2090 --patch are not passed, filematcher is None. Otherwise it is a
2092 2091 callable taking a revision number and returning a match object
2093 2092 filtering the files to be detailed when displaying the revision.
2094 2093 """
2095 2094 limit = loglimit(opts)
2096 2095 revs = _logrevs(repo, opts)
2097 2096 if not revs:
2098 2097 return revset.baseset(), None, None
2099 2098 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2100 2099 if opts.get('rev'):
2101 2100 # User-specified revs might be unsorted, but don't sort before
2102 2101 # _makelogrevset because it might depend on the order of revs
2103 2102 revs.sort(reverse=True)
2104 2103 if expr:
2105 2104 # Revset matchers often operate faster on revisions in changelog
2106 2105 # order, because most filters deal with the changelog.
2107 2106 revs.reverse()
2108 2107 matcher = revset.match(repo.ui, expr)
2109 2108 # Revset matches can reorder revisions. "A or B" typically returns
2110 2109 # the revision matching A then the revision matching B. Sort
2111 2110 # again to fix that.
2112 2111 revs = matcher(repo, revs)
2113 2112 revs.sort(reverse=True)
2114 2113 if limit is not None:
2115 2114 limitedrevs = []
2116 2115 for idx, rev in enumerate(revs):
2117 2116 if idx >= limit:
2118 2117 break
2119 2118 limitedrevs.append(rev)
2120 2119 revs = revset.baseset(limitedrevs)
2121 2120
2122 2121 return revs, expr, filematcher
2123 2122
2124 2123 def getlogrevs(repo, pats, opts):
2125 2124 """Return (revs, expr, filematcher) where revs is an iterable of
2126 2125 revision numbers, expr is a revset string built from log options
2127 2126 and file patterns or None, and used to filter 'revs'. If --stat or
2128 2127 --patch are not passed, filematcher is None. Otherwise it is a
2129 2128 callable taking a revision number and returning a match object
2130 2129 filtering the files to be detailed when displaying the revision.
2131 2130 """
2132 2131 limit = loglimit(opts)
2133 2132 revs = _logrevs(repo, opts)
2134 2133 if not revs:
2135 2134 return revset.baseset([]), None, None
2136 2135 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2137 2136 if expr:
2138 2137 # Revset matchers often operate faster on revisions in changelog
2139 2138 # order, because most filters deal with the changelog.
2140 2139 if not opts.get('rev'):
2141 2140 revs.reverse()
2142 2141 matcher = revset.match(repo.ui, expr)
2143 2142 # Revset matches can reorder revisions. "A or B" typically returns
2144 2143 # the revision matching A then the revision matching B. Sort
2145 2144 # again to fix that.
2146 2145 revs = matcher(repo, revs)
2147 2146 if not opts.get('rev'):
2148 2147 revs.sort(reverse=True)
2149 2148 if limit is not None:
2150 2149 limitedrevs = []
2151 2150 for idx, r in enumerate(revs):
2152 2151 if limit <= idx:
2153 2152 break
2154 2153 limitedrevs.append(r)
2155 2154 revs = revset.baseset(limitedrevs)
2156 2155
2157 2156 return revs, expr, filematcher
2158 2157
2159 2158 def _graphnodeformatter(ui, displayer):
2160 2159 spec = ui.config('ui', 'graphnodetemplate')
2161 2160 if not spec:
2162 2161 return templatekw.showgraphnode # fast path for "{graphnode}"
2163 2162
2164 2163 templ = formatter.gettemplater(ui, 'graphnode', spec)
2165 2164 cache = {}
2166 2165 if isinstance(displayer, changeset_templater):
2167 2166 cache = displayer.cache # reuse cache of slow templates
2168 2167 props = templatekw.keywords.copy()
2169 2168 props['templ'] = templ
2170 2169 props['cache'] = cache
2171 2170 def formatnode(repo, ctx):
2172 2171 props['ctx'] = ctx
2173 2172 props['repo'] = repo
2174 2173 props['revcache'] = {}
2175 2174 return templater.stringify(templ('graphnode', **props))
2176 2175 return formatnode
2177 2176
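# Example (illustrative sketch, hypothetical hgrc value): the fast path above
# is used unless ui.graphnodetemplate is set. Something like
#
#   [ui]
#   graphnodetemplate = {ifcontains(rev, revset('.'), '@', 'o')}
#
# draws the working directory parent with '@' and every other node with 'o',
# mimicking part of what templatekw.showgraphnode does by default.
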
2178 2177 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2179 2178 filematcher=None):
2180 2179 formatnode = _graphnodeformatter(ui, displayer)
2181 2180 seen, state = [], graphmod.asciistate()
2182 2181 for rev, type, ctx, parents in dag:
2183 2182 char = formatnode(repo, ctx)
2184 2183 copies = None
2185 2184 if getrenamed and ctx.rev():
2186 2185 copies = []
2187 2186 for fn in ctx.files():
2188 2187 rename = getrenamed(fn, ctx.rev())
2189 2188 if rename:
2190 2189 copies.append((fn, rename[0]))
2191 2190 revmatchfn = None
2192 2191 if filematcher is not None:
2193 2192 revmatchfn = filematcher(ctx.rev())
2194 2193 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2195 2194 lines = displayer.hunk.pop(rev).split('\n')
2196 2195 if not lines[-1]:
2197 2196 del lines[-1]
2198 2197 displayer.flush(ctx)
2199 2198 edges = edgefn(type, char, lines, seen, rev, parents)
2200 2199 for type, char, lines, coldata in edges:
2201 2200 graphmod.ascii(ui, state, type, char, lines, coldata)
2202 2201 displayer.close()
2203 2202
2204 2203 def graphlog(ui, repo, *pats, **opts):
2205 2204 # Parameters are identical to log command ones
2206 2205 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2207 2206 revdag = graphmod.dagwalker(repo, revs)
2208 2207
2209 2208 getrenamed = None
2210 2209 if opts.get('copies'):
2211 2210 endrev = None
2212 2211 if opts.get('rev'):
2213 2212 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2214 2213 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2215 2214 displayer = show_changeset(ui, repo, opts, buffered=True)
2216 2215 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2217 2216 filematcher)
2218 2217
2219 2218 def checkunsupportedgraphflags(pats, opts):
2220 2219 for op in ["newest_first"]:
2221 2220 if op in opts and opts[op]:
2222 2221 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2223 2222 % op.replace("_", "-"))
2224 2223
2225 2224 def graphrevs(repo, nodes, opts):
2226 2225 limit = loglimit(opts)
2227 2226 nodes.reverse()
2228 2227 if limit is not None:
2229 2228 nodes = nodes[:limit]
2230 2229 return graphmod.nodes(repo, nodes)
2231 2230
2232 2231 def add(ui, repo, match, prefix, explicitonly, **opts):
2233 2232 join = lambda f: os.path.join(prefix, f)
2234 2233 bad = []
2235 2234
2236 2235 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2237 2236 names = []
2238 2237 wctx = repo[None]
2239 2238 cca = None
2240 2239 abort, warn = scmutil.checkportabilityalert(ui)
2241 2240 if abort or warn:
2242 2241 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2243 2242
2244 2243 badmatch = matchmod.badmatch(match, badfn)
2245 2244 dirstate = repo.dirstate
2246 2245 # We don't want to just call wctx.walk here, since it would return a lot of
2247 2246 # clean files, which we aren't interested in, and walking them takes time.
2248 2247 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2249 2248 True, False, full=False)):
2250 2249 exact = match.exact(f)
2251 2250 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2252 2251 if cca:
2253 2252 cca(f)
2254 2253 names.append(f)
2255 2254 if ui.verbose or not exact:
2256 2255 ui.status(_('adding %s\n') % match.rel(f))
2257 2256
2258 2257 for subpath in sorted(wctx.substate):
2259 2258 sub = wctx.sub(subpath)
2260 2259 try:
2261 2260 submatch = matchmod.narrowmatcher(subpath, match)
2262 2261 if opts.get('subrepos'):
2263 2262 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2264 2263 else:
2265 2264 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2266 2265 except error.LookupError:
2267 2266 ui.status(_("skipping missing subrepository: %s\n")
2268 2267 % join(subpath))
2269 2268
2270 2269 if not opts.get('dry_run'):
2271 2270 rejected = wctx.add(names, prefix)
2272 2271 bad.extend(f for f in rejected if f in match.files())
2273 2272 return bad
2274 2273
2275 2274 def forget(ui, repo, match, prefix, explicitonly):
2276 2275 join = lambda f: os.path.join(prefix, f)
2277 2276 bad = []
2278 2277 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2279 2278 wctx = repo[None]
2280 2279 forgot = []
2281 2280
2282 2281 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2283 2282 forget = sorted(s[0] + s[1] + s[3] + s[6])
2284 2283 if explicitonly:
2285 2284 forget = [f for f in forget if match.exact(f)]
2286 2285
2287 2286 for subpath in sorted(wctx.substate):
2288 2287 sub = wctx.sub(subpath)
2289 2288 try:
2290 2289 submatch = matchmod.narrowmatcher(subpath, match)
2291 2290 subbad, subforgot = sub.forget(submatch, prefix)
2292 2291 bad.extend([subpath + '/' + f for f in subbad])
2293 2292 forgot.extend([subpath + '/' + f for f in subforgot])
2294 2293 except error.LookupError:
2295 2294 ui.status(_("skipping missing subrepository: %s\n")
2296 2295 % join(subpath))
2297 2296
2298 2297 if not explicitonly:
2299 2298 for f in match.files():
2300 2299 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2301 2300 if f not in forgot:
2302 2301 if repo.wvfs.exists(f):
2303 2302 # Don't complain if the exact case match wasn't given.
2304 2303 # But don't do this until after checking 'forgot', so
2305 2304 # that subrepo files aren't normalized, and this op is
2306 2305 # purely from data cached by the status walk above.
2307 2306 if repo.dirstate.normalize(f) in repo.dirstate:
2308 2307 continue
2309 2308 ui.warn(_('not removing %s: '
2310 2309 'file is already untracked\n')
2311 2310 % match.rel(f))
2312 2311 bad.append(f)
2313 2312
2314 2313 for f in forget:
2315 2314 if ui.verbose or not match.exact(f):
2316 2315 ui.status(_('removing %s\n') % match.rel(f))
2317 2316
2318 2317 rejected = wctx.forget(forget, prefix)
2319 2318 bad.extend(f for f in rejected if f in match.files())
2320 2319 forgot.extend(f for f in forget if f not in rejected)
2321 2320 return bad, forgot
2322 2321
2323 2322 def files(ui, ctx, m, fm, fmt, subrepos):
2324 2323 rev = ctx.rev()
2325 2324 ret = 1
2326 2325 ds = ctx.repo().dirstate
2327 2326
2328 2327 for f in ctx.matches(m):
2329 2328 if rev is None and ds[f] == 'r':
2330 2329 continue
2331 2330 fm.startitem()
2332 2331 if ui.verbose:
2333 2332 fc = ctx[f]
2334 2333 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2335 2334 fm.data(abspath=f)
2336 2335 fm.write('path', fmt, m.rel(f))
2337 2336 ret = 0
2338 2337
2339 2338 for subpath in sorted(ctx.substate):
2340 2339 def matchessubrepo(subpath):
2341 2340 return (m.always() or m.exact(subpath)
2342 2341 or any(f.startswith(subpath + '/') for f in m.files()))
2343 2342
2344 2343 if subrepos or matchessubrepo(subpath):
2345 2344 sub = ctx.sub(subpath)
2346 2345 try:
2347 2346 submatch = matchmod.narrowmatcher(subpath, m)
2348 2347 if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
2349 2348 ret = 0
2350 2349 except error.LookupError:
2351 2350 ui.status(_("skipping missing subrepository: %s\n")
2352 2351 % m.abs(subpath))
2353 2352
2354 2353 return ret
2355 2354
2356 2355 def remove(ui, repo, m, prefix, after, force, subrepos):
2357 2356 join = lambda f: os.path.join(prefix, f)
2358 2357 ret = 0
2359 2358 s = repo.status(match=m, clean=True)
2360 2359 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2361 2360
2362 2361 wctx = repo[None]
2363 2362
2364 2363 for subpath in sorted(wctx.substate):
2365 2364 def matchessubrepo(matcher, subpath):
2366 2365 if matcher.exact(subpath):
2367 2366 return True
2368 2367 for f in matcher.files():
2369 2368 if f.startswith(subpath):
2370 2369 return True
2371 2370 return False
2372 2371
2373 2372 if subrepos or matchessubrepo(m, subpath):
2374 2373 sub = wctx.sub(subpath)
2375 2374 try:
2376 2375 submatch = matchmod.narrowmatcher(subpath, m)
2377 2376 if sub.removefiles(submatch, prefix, after, force, subrepos):
2378 2377 ret = 1
2379 2378 except error.LookupError:
2380 2379 ui.status(_("skipping missing subrepository: %s\n")
2381 2380 % join(subpath))
2382 2381
2383 2382 # warn about failure to delete explicit files/dirs
2384 2383 deleteddirs = util.dirs(deleted)
2385 2384 for f in m.files():
2386 2385 def insubrepo():
2387 2386 for subpath in wctx.substate:
2388 2387 if f.startswith(subpath):
2389 2388 return True
2390 2389 return False
2391 2390
2392 2391 isdir = f in deleteddirs or wctx.hasdir(f)
2393 2392 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2394 2393 continue
2395 2394
2396 2395 if repo.wvfs.exists(f):
2397 2396 if repo.wvfs.isdir(f):
2398 2397 ui.warn(_('not removing %s: no tracked files\n')
2399 2398 % m.rel(f))
2400 2399 else:
2401 2400 ui.warn(_('not removing %s: file is untracked\n')
2402 2401 % m.rel(f))
2403 2402 # missing files will generate a warning elsewhere
2404 2403 ret = 1
2405 2404
2406 2405 if force:
2407 2406 list = modified + deleted + clean + added
2408 2407 elif after:
2409 2408 list = deleted
2410 2409 for f in modified + added + clean:
2411 2410 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2412 2411 ret = 1
2413 2412 else:
2414 2413 list = deleted + clean
2415 2414 for f in modified:
2416 2415 ui.warn(_('not removing %s: file is modified (use -f'
2417 2416 ' to force removal)\n') % m.rel(f))
2418 2417 ret = 1
2419 2418 for f in added:
2420 2419 ui.warn(_('not removing %s: file has been marked for add'
2421 2420 ' (use forget to undo)\n') % m.rel(f))
2422 2421 ret = 1
2423 2422
2424 2423 for f in sorted(list):
2425 2424 if ui.verbose or not m.exact(f):
2426 2425 ui.status(_('removing %s\n') % m.rel(f))
2427 2426
2428 2427 wlock = repo.wlock()
2429 2428 try:
2430 2429 if not after:
2431 2430 for f in list:
2432 2431 if f in added:
2433 2432 continue # we never unlink added files on remove
2434 2433 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2435 2434 repo[None].forget(list)
2436 2435 finally:
2437 2436 wlock.release()
2438 2437
2439 2438 return ret
2440 2439
2441 2440 def cat(ui, repo, ctx, matcher, prefix, **opts):
2442 2441 err = 1
2443 2442
2444 2443 def write(path):
2445 2444 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2446 2445 pathname=os.path.join(prefix, path))
2447 2446 data = ctx[path].data()
2448 2447 if opts.get('decode'):
2449 2448 data = repo.wwritedata(path, data)
2450 2449 fp.write(data)
2451 2450 fp.close()
2452 2451
2453 2452 # Automation often uses hg cat on single files, so special case it
2454 2453 # for performance to avoid the cost of parsing the manifest.
2455 2454 if len(matcher.files()) == 1 and not matcher.anypats():
2456 2455 file = matcher.files()[0]
2457 2456 mf = repo.manifest
2458 2457 mfnode = ctx.manifestnode()
2459 2458 if mfnode and mf.find(mfnode, file)[0]:
2460 2459 write(file)
2461 2460 return 0
2462 2461
2463 2462 # Don't warn about "missing" files that are really in subrepos
2464 2463 def badfn(path, msg):
2465 2464 for subpath in ctx.substate:
2466 2465 if path.startswith(subpath):
2467 2466 return
2468 2467 matcher.bad(path, msg)
2469 2468
2470 2469 for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
2471 2470 write(abs)
2472 2471 err = 0
2473 2472
2474 2473 for subpath in sorted(ctx.substate):
2475 2474 sub = ctx.sub(subpath)
2476 2475 try:
2477 2476 submatch = matchmod.narrowmatcher(subpath, matcher)
2478 2477
2479 2478 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2480 2479 **opts):
2481 2480 err = 0
2482 2481 except error.RepoLookupError:
2483 2482 ui.status(_("skipping missing subrepository: %s\n")
2484 2483 % os.path.join(prefix, subpath))
2485 2484
2486 2485 return err
2487 2486
2488 2487 def commit(ui, repo, commitfunc, pats, opts):
2489 2488 '''commit the specified files or all outstanding changes'''
2490 2489 date = opts.get('date')
2491 2490 if date:
2492 2491 opts['date'] = util.parsedate(date)
2493 2492 message = logmessage(ui, opts)
2494 2493 matcher = scmutil.match(repo[None], pats, opts)
2495 2494
2496 2495 # extract addremove carefully -- this function can be called from a command
2497 2496 # that doesn't support addremove
2498 2497 if opts.get('addremove'):
2499 2498 if scmutil.addremove(repo, matcher, "", opts) != 0:
2500 2499 raise error.Abort(
2501 2500 _("failed to mark all new/missing files as added/removed"))
2502 2501
2503 2502 return commitfunc(ui, repo, message, matcher, opts)
2504 2503
2505 2504 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2506 2505 # avoid cycle context -> subrepo -> cmdutil
2507 2506 import context
2508 2507
2509 2508 # amend will reuse the existing user if not specified, but the obsolete
2510 2509 # marker creation requires that the current user's name is specified.
2511 2510 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2512 2511 ui.username() # raise exception if username not set
2513 2512
2514 2513 ui.note(_('amending changeset %s\n') % old)
2515 2514 base = old.p1()
2516 2515 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2517 2516
2518 2517 wlock = lock = newid = None
2519 2518 try:
2520 2519 wlock = repo.wlock()
2521 2520 lock = repo.lock()
2522 2521 tr = repo.transaction('amend')
2523 2522 try:
2524 2523 # See if we got a message from -m or -l, if not, open the editor
2525 2524 # with the message of the changeset to amend
2526 2525 message = logmessage(ui, opts)
2527 2526 # ensure logfile does not conflict with later enforcement of the
2528 2527 # message. potential logfile content has been processed by
2529 2528 # `logmessage` anyway.
2530 2529 opts.pop('logfile')
2531 2530 # First, do a regular commit to record all changes in the working
2532 2531 # directory (if there are any)
2533 2532 ui.callhooks = False
2534 2533 activebookmark = repo._activebookmark
2535 2534 try:
2536 2535 repo._activebookmark = None
2537 2536 opts['message'] = 'temporary amend commit for %s' % old
2538 2537 node = commit(ui, repo, commitfunc, pats, opts)
2539 2538 finally:
2540 2539 repo._activebookmark = activebookmark
2541 2540 ui.callhooks = True
2542 2541 ctx = repo[node]
2543 2542
2544 2543 # Participating changesets:
2545 2544 #
2546 2545 # node/ctx o - new (intermediate) commit that contains changes
2547 2546 # | from working dir to go into amending commit
2548 2547 # | (or a workingctx if there were no changes)
2549 2548 # |
2550 2549 # old o - changeset to amend
2551 2550 # |
2552 2551 # base o - parent of amending changeset
2553 2552
2554 2553 # Update extra dict from amended commit (e.g. to preserve graft
2555 2554 # source)
2556 2555 extra.update(old.extra())
2557 2556
2558 2557 # Also update it from the intermediate commit or from the wctx
2559 2558 extra.update(ctx.extra())
2560 2559
2561 2560 if len(old.parents()) > 1:
2562 2561 # ctx.files() isn't reliable for merges, so fall back to the
2563 2562 # slower repo.status() method
2564 2563 files = set([fn for st in repo.status(base, old)[:3]
2565 2564 for fn in st])
2566 2565 else:
2567 2566 files = set(old.files())
2568 2567
2569 2568 # Second, we use either the commit we just did or, if there were no
2570 2569 # changes, the parent of the working directory as the version of the
2571 2570 # files in the final amend commit
2572 2571 if node:
2573 2572 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2574 2573
2575 2574 user = ctx.user()
2576 2575 date = ctx.date()
2577 2576 # Recompute copies (avoid recording a -> b -> a)
2578 2577 copied = copies.pathcopies(base, ctx)
2579 2578 if old.p2().node() != nullid: # p2 is a method; test the parent itself
2580 2579 copied.update(copies.pathcopies(old.p2(), ctx))
2581 2580
2582 2581 # Prune files which were reverted by the updates: if old
2583 2582 # introduced file X and our intermediate commit, node,
2584 2583 # renamed that file, then those two files are the same and
2585 2584 # we can discard X from our list of files. Likewise if X
2586 2585 # was deleted, it's no longer relevant
2587 2586 files.update(ctx.files())
2588 2587
2589 2588 def samefile(f):
2590 2589 if f in ctx.manifest():
2591 2590 a = ctx.filectx(f)
2592 2591 if f in base.manifest():
2593 2592 b = base.filectx(f)
2594 2593 return (not a.cmp(b)
2595 2594 and a.flags() == b.flags())
2596 2595 else:
2597 2596 return False
2598 2597 else:
2599 2598 return f not in base.manifest()
2600 2599 files = [f for f in files if not samefile(f)]
2601 2600
2602 2601 def filectxfn(repo, ctx_, path):
2603 2602 try:
2604 2603 fctx = ctx[path]
2605 2604 flags = fctx.flags()
2606 2605 mctx = context.memfilectx(repo,
2607 2606 fctx.path(), fctx.data(),
2608 2607 islink='l' in flags,
2609 2608 isexec='x' in flags,
2610 2609 copied=copied.get(path))
2611 2610 return mctx
2612 2611 except KeyError:
2613 2612 return None
2614 2613 else:
2615 2614 ui.note(_('copying changeset %s to %s\n') % (old, base))
2616 2615
2617 2616 # Use version of files as in the old cset
2618 2617 def filectxfn(repo, ctx_, path):
2619 2618 try:
2620 2619 return old.filectx(path)
2621 2620 except KeyError:
2622 2621 return None
2623 2622
2624 2623 user = opts.get('user') or old.user()
2625 2624 date = opts.get('date') or old.date()
2626 2625 editform = mergeeditform(old, 'commit.amend')
2627 2626 editor = getcommiteditor(editform=editform, **opts)
2628 2627 if not message:
2629 2628 editor = getcommiteditor(edit=True, editform=editform)
2630 2629 message = old.description()
2631 2630
2632 2631 pureextra = extra.copy()
2633 2632 if 'amend_source' in pureextra:
2634 2633 del pureextra['amend_source']
2635 2634 pureoldextra = old.extra()
2636 2635 if 'amend_source' in pureoldextra:
2637 2636 del pureoldextra['amend_source']
2638 2637 extra['amend_source'] = old.hex()
2639 2638
2640 2639 new = context.memctx(repo,
2641 2640 parents=[base.node(), old.p2().node()],
2642 2641 text=message,
2643 2642 files=files,
2644 2643 filectxfn=filectxfn,
2645 2644 user=user,
2646 2645 date=date,
2647 2646 extra=extra,
2648 2647 editor=editor)
2649 2648
2650 2649 newdesc = changelog.stripdesc(new.description())
2651 2650 if ((not node)
2652 2651 and newdesc == old.description()
2653 2652 and user == old.user()
2654 2653 and date == old.date()
2655 2654 and pureextra == pureoldextra):
2656 2655 # nothing changed. continuing here would create a new node
2657 2656 # anyway because of the amend_source noise.
2658 2657 #
2659 2658 # This is not what we expect from amend.
2660 2659 return old.node()
2661 2660
2662 2661 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2663 2662 try:
2664 2663 if opts.get('secret'):
2665 2664 commitphase = 'secret'
2666 2665 else:
2667 2666 commitphase = old.phase()
2668 2667 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2669 2668 newid = repo.commitctx(new)
2670 2669 finally:
2671 2670 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2672 2671 if newid != old.node():
2673 2672 # Reroute the working copy parent to the new changeset
2674 2673 repo.setparents(newid, nullid)
2675 2674
2676 2675 # Move bookmarks from old parent to amend commit
2677 2676 bms = repo.nodebookmarks(old.node())
2678 2677 if bms:
2679 2678 marks = repo._bookmarks
2680 2679 for bm in bms:
2681 2680 ui.debug('moving bookmarks %r from %s to %s\n' %
2682 2681 (marks, old.hex(), hex(newid)))
2683 2682 marks[bm] = newid
2684 2683 marks.recordchange(tr)
2685 2684 # commit the whole amend process
2686 2685 if createmarkers:
2687 2686 # mark the new changeset as successor of the rewritten one
2688 2687 new = repo[newid]
2689 2688 obs = [(old, (new,))]
2690 2689 if node:
2691 2690 obs.append((ctx, ()))
2692 2691
2693 2692 obsolete.createmarkers(repo, obs)
2694 2693 tr.close()
2695 2694 finally:
2696 2695 tr.release()
2697 2696 if not createmarkers and newid != old.node():
2698 2697 # Strip the intermediate commit (if there was one) and the amended
2699 2698 # commit
2700 2699 if node:
2701 2700 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2702 2701 ui.note(_('stripping amended changeset %s\n') % old)
2703 2702 repair.strip(ui, repo, old.node(), topic='amend-backup')
2704 2703 finally:
2705 2704 lockmod.release(lock, wlock)
2706 2705 return newid
2707 2706
2708 2707 def commiteditor(repo, ctx, subs, editform=''):
2709 2708 if ctx.description():
2710 2709 return ctx.description()
2711 2710 return commitforceeditor(repo, ctx, subs, editform=editform,
2712 2711 unchangedmessagedetection=True)
2713 2712
2714 2713 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2715 2714 editform='', unchangedmessagedetection=False):
2716 2715 if not extramsg:
2717 2716 extramsg = _("Leave message empty to abort commit.")
2718 2717
2719 2718 forms = [e for e in editform.split('.') if e]
2720 2719 forms.insert(0, 'changeset')
2721 2720 templatetext = None
2722 2721 while forms:
2723 2722 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2724 2723 if tmpl:
2725 2724 templatetext = committext = buildcommittemplate(
2726 2725 repo, ctx, subs, extramsg, tmpl)
2727 2726 break
2728 2727 forms.pop()
2729 2728 else:
2730 2729 committext = buildcommittext(repo, ctx, subs, extramsg)
2731 2730
2732 2731 # run editor in the repository root
2733 2732 olddir = os.getcwd()
2734 2733 os.chdir(repo.root)
2735 2734
2736 2735 # make in-memory changes visible to external process
2737 2736 tr = repo.currenttransaction()
2738 2737 repo.dirstate.write(tr)
2739 2738 pending = tr and tr.writepending() and repo.root
2740 2739
2741 2740 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2742 2741 editform=editform, pending=pending)
2743 2742 text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
2744 2743 os.chdir(olddir)
2745 2744
2746 2745 if finishdesc:
2747 2746 text = finishdesc(text)
2748 2747 if not text.strip():
2749 2748 raise error.Abort(_("empty commit message"))
2750 2749 if unchangedmessagedetection and editortext == templatetext:
2751 2750 raise error.Abort(_("commit message unchanged"))
2752 2751
2753 2752 return text
2754 2753
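# Example (illustrative sketch, hypothetical hgrc values): the forms loop in
# commitforceeditor() tries [committemplate] keys from the most specific
# editform down to the generic "changeset" one, so with
#
#   [committemplate]
#   changeset.commit.amend = HG: amending {node|short}\n{desc}
#   changeset = {desc}
#
# an "hg commit --amend" editor is seeded from the first key while other
# commits fall back to the plain "changeset" template.
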
2755 2754 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2756 2755 ui = repo.ui
2757 2756 tmpl, mapfile = gettemplate(ui, tmpl, None)
2758 2757
2759 2758 try:
2760 2759 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2761 2760 except SyntaxError as inst:
2762 2761 raise error.Abort(inst.args[0])
2763 2762
2764 2763 for k, v in repo.ui.configitems('committemplate'):
2765 2764 if k != 'changeset':
2766 2765 t.t.cache[k] = v
2767 2766
2768 2767 if not extramsg:
2769 2768 extramsg = '' # ensure that extramsg is string
2770 2769
2771 2770 ui.pushbuffer()
2772 2771 t.show(ctx, extramsg=extramsg)
2773 2772 return ui.popbuffer()
2774 2773
2775 2774 def hgprefix(msg):
2776 2775 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2777 2776
2778 2777 def buildcommittext(repo, ctx, subs, extramsg):
2779 2778 edittext = []
2780 2779 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2781 2780 if ctx.description():
2782 2781 edittext.append(ctx.description())
2783 2782 edittext.append("")
2784 2783 edittext.append("") # Empty line between message and comments.
2785 2784 edittext.append(hgprefix(_("Enter commit message."
2786 2785 " Lines beginning with 'HG:' are removed.")))
2787 2786 edittext.append(hgprefix(extramsg))
2788 2787 edittext.append("HG: --")
2789 2788 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2790 2789 if ctx.p2():
2791 2790 edittext.append(hgprefix(_("branch merge")))
2792 2791 if ctx.branch():
2793 2792 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2794 2793 if bookmarks.isactivewdirparent(repo):
2795 2794 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2796 2795 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2797 2796 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2798 2797 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2799 2798 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2800 2799 if not added and not modified and not removed:
2801 2800 edittext.append(hgprefix(_("no files changed")))
2802 2801 edittext.append("")
2803 2802
2804 2803 return "\n".join(edittext)
2805 2804
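# Example (illustrative sketch, hypothetical user and file names): for a
# commit on branch "default" that only adds "a.txt", the text assembled
# above (after the leading blank lines reserved for the message) reads
# roughly:
#
#   HG: Enter commit message. Lines beginning with 'HG:' are removed.
#   HG: Leave message empty to abort commit.
#   HG: --
#   HG: user: Alice <alice@example.com>
#   HG: branch 'default'
#   HG: added a.txt
#
# with bookmark and subrepo lines appearing only when they apply.
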
2806 2805 def commitstatus(repo, node, branch, bheads=None, opts=None):
2807 2806 if opts is None:
2808 2807 opts = {}
2809 2808 ctx = repo[node]
2810 2809 parents = ctx.parents()
2811 2810
2812 2811 if (not opts.get('amend') and bheads and node not in bheads and not
2813 2812 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2814 2813 repo.ui.status(_('created new head\n'))
2815 2814 # The message is not printed for initial roots. For the other
2816 2815 # changesets, it is printed in the following situations:
2817 2816 #
2818 2817 # Par column: for the 2 parents with ...
2819 2818 # N: null or no parent
2820 2819 # B: parent is on another named branch
2821 2820 # C: parent is a regular non head changeset
2822 2821 # H: parent was a branch head of the current branch
2823 2822 # Msg column: whether we print "created new head" message
2824 2823 # In the following, it is assumed that there already exist some
2825 2824 # initial branch heads of the current branch, otherwise nothing is
2826 2825 # printed anyway.
2827 2826 #
2828 2827 # Par Msg Comment
2829 2828 # N N y additional topo root
2830 2829 #
2831 2830 # B N y additional branch root
2832 2831 # C N y additional topo head
2833 2832 # H N n usual case
2834 2833 #
2835 2834 # B B y weird additional branch root
2836 2835 # C B y branch merge
2837 2836 # H B n merge with named branch
2838 2837 #
2839 2838 # C C y additional head from merge
2840 2839 # C H n merge with a head
2841 2840 #
2842 2841 # H H n head merge: head count decreases
2843 2842
2844 2843 if not opts.get('close_branch'):
2845 2844 for r in parents:
2846 2845 if r.closesbranch() and r.branch() == branch:
2847 2846 repo.ui.status(_('reopening closed branch head %d\n') % r)
2848 2847
2849 2848 if repo.ui.debugflag:
2850 2849 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2851 2850 elif repo.ui.verbose:
2852 2851 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2853 2852
2854 2853 def revert(ui, repo, ctx, parents, *pats, **opts):
2855 2854 parent, p2 = parents
2856 2855 node = ctx.node()
2857 2856
2858 2857 mf = ctx.manifest()
2859 2858 if node == p2:
2860 2859 parent = p2
2861 2860 if node == parent:
2862 2861 pmf = mf
2863 2862 else:
2864 2863 pmf = None
2865 2864
2866 2865 # need all matching names in dirstate and manifest of target rev,
2867 2866 # so have to walk both. do not print errors if files exist in one
2868 2867 # but not the other. in both cases, filesets should be evaluated against
2869 2868 # workingctx to get consistent result (issue4497). this means 'set:**'
2870 2869 # cannot be used to select missing files from target rev.
2871 2870
2872 2871 # `names` is a mapping for all elements in working copy and target revision
2873 2872 # The mapping is in the form:
2874 2873 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2875 2874 names = {}
2876 2875
2877 2876 wlock = repo.wlock()
2878 2877 try:
2879 2878 ## filling of the `names` mapping
2880 2879 # walk dirstate to fill `names`
2881 2880
2882 2881 interactive = opts.get('interactive', False)
2883 2882 wctx = repo[None]
2884 2883 m = scmutil.match(wctx, pats, opts)
2885 2884
2886 2885 # we'll need this later
2887 2886 targetsubs = sorted(s for s in wctx.substate if m(s))
2888 2887
2889 2888 if not m.always():
2890 2889 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2891 2890 names[abs] = m.rel(abs), m.exact(abs)
2892 2891
2893 2892 # walk target manifest to fill `names`
2894 2893
2895 2894 def badfn(path, msg):
2896 2895 if path in names:
2897 2896 return
2898 2897 if path in ctx.substate:
2899 2898 return
2900 2899 path_ = path + '/'
2901 2900 for f in names:
2902 2901 if f.startswith(path_):
2903 2902 return
2904 2903 ui.warn("%s: %s\n" % (m.rel(path), msg))
2905 2904
2906 2905 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2907 2906 if abs not in names:
2908 2907 names[abs] = m.rel(abs), m.exact(abs)
2909 2908
2910 2909 # Find the status of all files in `names`.
2911 2910 m = scmutil.matchfiles(repo, names)
2912 2911
2913 2912 changes = repo.status(node1=node, match=m,
2914 2913 unknown=True, ignored=True, clean=True)
2915 2914 else:
2916 2915 changes = repo.status(node1=node, match=m)
2917 2916 for kind in changes:
2918 2917 for abs in kind:
2919 2918 names[abs] = m.rel(abs), m.exact(abs)
2920 2919
2921 2920 m = scmutil.matchfiles(repo, names)
2922 2921
2923 2922 modified = set(changes.modified)
2924 2923 added = set(changes.added)
2925 2924 removed = set(changes.removed)
2926 2925 _deleted = set(changes.deleted)
2927 2926 unknown = set(changes.unknown)
2928 2927 unknown.update(changes.ignored)
2929 2928 clean = set(changes.clean)
2930 2929 modadded = set()
2931 2930
2932 2931 # split between files known in target manifest and the others
2933 2932 smf = set(mf)
2934 2933
2935 2934 # determine the exact nature of the deleted files
2936 2935 deladded = _deleted - smf
2937 2936 deleted = _deleted - deladded
2938 2937
2939 2938 # We need to account for the state of the file in the dirstate,
2940 2939 # even when we revert against something other than the parent. This will
2941 2940 # slightly alter the behavior of revert (doing a backup or not, delete
2942 2941 # or just forget, etc.).
2943 2942 if parent == node:
2944 2943 dsmodified = modified
2945 2944 dsadded = added
2946 2945 dsremoved = removed
2947 2946 # store all local modifications, useful later for rename detection
2948 2947 localchanges = dsmodified | dsadded
2949 2948 modified, added, removed = set(), set(), set()
2950 2949 else:
2951 2950 changes = repo.status(node1=parent, match=m)
2952 2951 dsmodified = set(changes.modified)
2953 2952 dsadded = set(changes.added)
2954 2953 dsremoved = set(changes.removed)
2955 2954 # store all local modifications, useful later for rename detection
2956 2955 localchanges = dsmodified | dsadded
2957 2956
2958 2957 # only take into account removes between wc and target
2959 2958 clean |= dsremoved - removed
2960 2959 dsremoved &= removed
2961 2960 # distinguish between dirstate removes and the others
2962 2961 removed -= dsremoved
2963 2962
2964 2963 modadded = added & dsmodified
2965 2964 added -= modadded
2966 2965
2967 2966 # tell newly modified files apart.
2968 2967 dsmodified &= modified
2969 2968 dsmodified |= modified & dsadded # dirstate added may need backup
2970 2969 modified -= dsmodified
2971 2970
2972 2971 # We need to wait for some post-processing to update this set
2973 2972 # before making the distinction. The dirstate will be used for
2974 2973 # that purpose.
2975 2974 dsadded = added
2976 2975
2977 2976 # in case of merge, files that are actually added can be reported as
2978 2977 # modified; we need to post-process the result
2979 2978 if p2 != nullid:
2980 2979 if pmf is None:
2981 2980 # only need parent manifest in the merge case,
2982 2981 # so do not read by default
2983 2982 pmf = repo[parent].manifest()
2984 2983 mergeadd = dsmodified - set(pmf)
2985 2984 dsadded |= mergeadd
2986 2985 dsmodified -= mergeadd
2987 2986
2988 2987 # if f is a rename, update `names` to also revert the source
2989 2988 cwd = repo.getcwd()
2990 2989 for f in localchanges:
2991 2990 src = repo.dirstate.copied(f)
2992 2991 # XXX should we check for rename down to target node?
2993 2992 if src and src not in names and repo.dirstate[src] == 'r':
2994 2993 dsremoved.add(src)
2995 2994 names[src] = (repo.pathto(src, cwd), True)
2996 2995
2997 2996 # distinguish between files to forget and the others
2998 2997 added = set()
2999 2998 for abs in dsadded:
3000 2999 if repo.dirstate[abs] != 'a':
3001 3000 added.add(abs)
3002 3001 dsadded -= added
3003 3002
3004 3003 for abs in deladded:
3005 3004 if repo.dirstate[abs] == 'a':
3006 3005 dsadded.add(abs)
3007 3006 deladded -= dsadded
3008 3007
3009 3008 # For files marked as removed, we check if an unknown file is present at
3010 3009 # the same path. If such a file exists it may need to be backed up.
3011 3010 # Making the distinction at this stage helps keep the backup logic
3012 3011 # simpler.
3013 3012 removunk = set()
3014 3013 for abs in removed:
3015 3014 target = repo.wjoin(abs)
3016 3015 if os.path.lexists(target):
3017 3016 removunk.add(abs)
3018 3017 removed -= removunk
3019 3018
3020 3019 dsremovunk = set()
3021 3020 for abs in dsremoved:
3022 3021 target = repo.wjoin(abs)
3023 3022 if os.path.lexists(target):
3024 3023 dsremovunk.add(abs)
3025 3024 dsremoved -= dsremovunk
3026 3025
3027 3026 # action to be actually performed by revert
3028 3027 # (<list of files>, <message>) tuple
3029 3028 actions = {'revert': ([], _('reverting %s\n')),
3030 3029 'add': ([], _('adding %s\n')),
3031 3030 'remove': ([], _('removing %s\n')),
3032 3031 'drop': ([], _('removing %s\n')),
3033 3032 'forget': ([], _('forgetting %s\n')),
3034 3033 'undelete': ([], _('undeleting %s\n')),
3035 3034 'noop': (None, _('no changes needed to %s\n')),
3036 3035 'unknown': (None, _('file not managed: %s\n')),
3037 3036 }
3038 3037
3039 3038 # "constant" that convey the backup strategy.
3040 3039 # All set to `discard` if `no-backup` is set do avoid checking
3041 3040 # no_backup lower in the code.
3042 3041 # These values are ordered for comparison purposes
3043 3042 backup = 2 # unconditionally do backup
3044 3043 check = 1 # check if the existing file differs from target
3045 3044 discard = 0 # never do backup
3046 3045 if opts.get('no_backup'):
3047 3046 backup = check = discard
3048 3047
3049 3048 backupanddel = actions['remove']
3050 3049 if not opts.get('no_backup'):
3051 3050 backupanddel = actions['drop']
3052 3051
3053 3052 disptable = (
3054 3053 # dispatch table:
3055 3054 # file state
3056 3055 # action
3057 3056 # make backup
3058 3057
3059 3058 ## Sets whose entries will result in a file change on disk
3060 3059 # Modified compared to target, no local change
3061 3060 (modified, actions['revert'], discard),
3062 3061 # Modified compared to target, but local file is deleted
3063 3062 (deleted, actions['revert'], discard),
3064 3063 # Modified compared to target, local change
3065 3064 (dsmodified, actions['revert'], backup),
3066 3065 # Added since target
3067 3066 (added, actions['remove'], discard),
3068 3067 # Added in working directory
3069 3068 (dsadded, actions['forget'], discard),
3070 3069 # Added since target, have local modification
3071 3070 (modadded, backupanddel, backup),
3072 3071 # Added since target but file is missing in working directory
3073 3072 (deladded, actions['drop'], discard),
3074 3073 # Removed since target, before working copy parent
3075 3074 (removed, actions['add'], discard),
3076 3075 # Same as `removed` but an unknown file exists at the same path
3077 3076 (removunk, actions['add'], check),
3078 3077 # Removed since target, marked as such in working copy parent
3079 3078 (dsremoved, actions['undelete'], discard),
3080 3079 # Same as `dsremoved` but an unknown file exists at the same path
3081 3080 (dsremovunk, actions['undelete'], check),
3082 3081 ## the following sets do not result in any file changes
3083 3082 # File with no modification
3084 3083 (clean, actions['noop'], discard),
3085 3084 # Existing file, not tracked anywhere
3086 3085 (unknown, actions['unknown'], discard),
3087 3086 )
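# Each file in `names` is matched against these sets in order by the loop
# below; the first set that contains it decides both the action list it is
# appended to and whether a `.orig` backup is considered.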
3088 3087
3089 3088 for abs, (rel, exact) in sorted(names.items()):
3090 3089 # target file to be touched on disk (relative to cwd)
3091 3090 target = repo.wjoin(abs)
3092 3091 # search for the entry in the dispatch table.
3093 3092 # if the file is in any of these sets, it was touched in the working
3094 3093 # directory parent and we are sure it needs to be reverted.
3095 3094 for table, (xlist, msg), dobackup in disptable:
3096 3095 if abs not in table:
3097 3096 continue
3098 3097 if xlist is not None:
3099 3098 xlist.append(abs)
3100 3099 if dobackup and (backup <= dobackup
3101 3100 or wctx[abs].cmp(ctx[abs])):
3102 3101 bakname = origpath(ui, repo, rel)
3103 3102 ui.note(_('saving current version of %s as %s\n') %
3104 3103 (rel, bakname))
3105 3104 if not opts.get('dry_run'):
3106 3105 if interactive:
3107 3106 util.copyfile(target, bakname)
3108 3107 else:
3109 3108 util.rename(target, bakname)
3110 3109 if ui.verbose or not exact:
3111 3110 if not isinstance(msg, basestring):
3112 3111 msg = msg(abs)
3113 3112 ui.status(msg % rel)
3114 3113 elif exact:
3115 3114 ui.warn(msg % rel)
3116 3115 break
3117 3116
3118 3117 if not opts.get('dry_run'):
3119 3118 needdata = ('revert', 'add', 'undelete')
3120 3119 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3121 3120 _performrevert(repo, parents, ctx, actions, interactive)
3122 3121
3123 3122 if targetsubs:
3124 3123 # Revert the subrepos on the revert list
3125 3124 for sub in targetsubs:
3126 3125 try:
3127 3126 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3128 3127 except KeyError:
3129 3128 raise error.Abort("subrepository '%s' does not exist in %s!"
3130 3129 % (sub, short(ctx.node())))
3131 3130 finally:
3132 3131 wlock.release()
3133 3132
3134 3133 def origpath(ui, repo, filepath):
3135 3134 '''customize where .orig files are created
3136 3135
3137 3136 Fetch the user-defined path from the config file: [ui] origbackuppath = <path>
3138 3137 Fall back to the default (filepath + '.orig') if not specified
3139 3138 '''
3140 3139 origbackuppath = ui.config('ui', 'origbackuppath', None)
3141 3140 if origbackuppath is None:
3142 3141 return filepath + ".orig"
3143 3142
3144 3143 filepathfromroot = os.path.relpath(filepath, start=repo.root)
3145 3144 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
3146 3145
3147 3146 origbackupdir = repo.vfs.dirname(fullorigpath)
3148 3147 if not repo.vfs.exists(origbackupdir):
3149 3148 ui.note(_('creating directory: %s\n') % origbackupdir)
3150 3149 util.makedirs(origbackupdir)
3151 3150
3152 3151 return fullorigpath + ".orig"
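
# Illustrative sketch of origpath() in use (paths below are hypothetical):
# with a configuration such as
#
#   [ui]
#   origbackuppath = .hg/origbackups
#
# a backup of foo/bar.txt is written to .hg/origbackups/foo/bar.txt.orig
# under the repository root (creating the directory if needed) instead of
# foo/bar.txt.orig next to the file.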
3153 3152
3154 3153 def _revertprefetch(repo, ctx, *files):
3155 3154 """Let extension changing the storage layer prefetch content"""
3156 3155 pass
3157 3156
3158 3157 def _performrevert(repo, parents, ctx, actions, interactive=False):
3159 3158 """function that actually perform all the actions computed for revert
3160 3159
3161 3160 This is an independent function to let extension to plug in and react to
3162 3161 the imminent revert.
3163 3162
3164 3163 Make sure you have the working directory locked when calling this function.
3165 3164 """
3166 3165 parent, p2 = parents
3167 3166 node = ctx.node()
3168 3167 def checkout(f):
3169 3168 fc = ctx[f]
3170 3169 repo.wwrite(f, fc.data(), fc.flags())
3171 3170
3172 3171 audit_path = pathutil.pathauditor(repo.root)
3173 3172 for f in actions['forget'][0]:
3174 3173 repo.dirstate.drop(f)
3175 3174 for f in actions['remove'][0]:
3176 3175 audit_path(f)
3177 3176 try:
3178 3177 util.unlinkpath(repo.wjoin(f))
3179 3178 except OSError:
3180 3179 pass
3181 3180 repo.dirstate.remove(f)
3182 3181 for f in actions['drop'][0]:
3183 3182 audit_path(f)
3184 3183 repo.dirstate.remove(f)
3185 3184
3186 3185 normal = None
3187 3186 if node == parent:
3188 3187 # We're reverting to our parent. If possible, we'd like status
3189 3188 # to report the file as clean. We have to use normallookup for
3190 3189 # merges to avoid losing information about merged/dirty files.
3191 3190 if p2 != nullid:
3192 3191 normal = repo.dirstate.normallookup
3193 3192 else:
3194 3193 normal = repo.dirstate.normal
3195 3194
3196 3195 newlyaddedandmodifiedfiles = set()
3197 3196 if interactive:
3198 3197 # Prompt the user for changes to revert
3199 3198 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3200 3199 m = scmutil.match(ctx, torevert, {})
3201 3200 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3202 3201 diffopts.nodates = True
3203 3202 diffopts.git = True
3204 3203 reversehunks = repo.ui.configbool('experimental',
3205 3204 'revertalternateinteractivemode',
3206 3205 True)
3207 3206 if reversehunks:
3208 3207 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3209 3208 else:
3210 3209 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
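# In the default (reversehunks) mode the diff runs from the target revision
# to the working copy, i.e. it shows the local changes, so the hunks the
# user selects describe changes to discard and are flipped back with
# patch.reversehunks() below before being applied.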
3211 3210 originalchunks = patch.parsepatch(diff)
3212 3211
3213 3212 try:
3214 3213
3215 3214 chunks, opts = recordfilter(repo.ui, originalchunks)
3216 3215 if reversehunks:
3217 3216 chunks = patch.reversehunks(chunks)
3218 3217
3219 3218 except patch.PatchError as err:
3220 3219 raise error.Abort(_('error parsing patch: %s') % err)
3221 3220
3222 3221 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3223 3222 # Apply changes
3224 3223 fp = cStringIO.StringIO()
3225 3224 for c in chunks:
3226 3225 c.write(fp)
3227 3226 dopatch = fp.tell()
3228 3227 fp.seek(0)
3229 3228 if dopatch:
3230 3229 try:
3231 3230 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3232 3231 except patch.PatchError as err:
3233 3232 raise error.Abort(str(err))
3234 3233 del fp
3235 3234 else:
3236 3235 for f in actions['revert'][0]:
3237 3236 checkout(f)
3238 3237 if normal:
3239 3238 normal(f)
3240 3239
3241 3240 for f in actions['add'][0]:
3242 3241 # Don't check out modified files; they were already created by the diff
3243 3242 if f not in newlyaddedandmodifiedfiles:
3244 3243 checkout(f)
3245 3244 repo.dirstate.add(f)
3246 3245
3247 3246 normal = repo.dirstate.normallookup
3248 3247 if node == parent and p2 == nullid:
3249 3248 normal = repo.dirstate.normal
3250 3249 for f in actions['undelete'][0]:
3251 3250 checkout(f)
3252 3251 normal(f)
3253 3252
3254 3253 copied = copies.pathcopies(repo[parent], ctx)
3255 3254
3256 3255 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3257 3256 if f in copied:
3258 3257 repo.dirstate.copy(copied[f], f)
3259 3258
3260 3259 def command(table):
3261 3260 """Returns a function object to be used as a decorator for making commands.
3262 3261
3263 3262 This function receives a command table as its argument. The table should
3264 3263 be a dict.
3265 3264
3266 3265 The returned function can be used as a decorator for adding commands
3267 3266 to that command table. This function accepts multiple arguments to define
3268 3267 a command.
3269 3268
3270 3269 The first argument is the command name.
3271 3270
3272 3271 The options argument is an iterable of tuples defining command arguments.
3273 3272 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3274 3273
3275 3274 The synopsis argument defines a short, one-line summary of how to use the
3276 3275 command. This shows up in the help output.
3277 3276
3278 3277 The norepo argument defines whether the command does not require a
3279 3278 local repository. Most commands operate against a repository, thus the
3280 3279 default is False.
3281 3280
3282 3281 The optionalrepo argument defines whether the command optionally requires
3283 3282 a local repository.
3284 3283
3285 3284 The inferrepo argument defines whether to try to find a repository from the
3286 3285 command line arguments. If True, arguments will be examined for potential
3287 3286 repository locations. See ``findrepo()``. If a repository is found, it
3288 3287 will be used.
3289 3288 """
3290 3289 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3291 3290 inferrepo=False):
3292 3291 def decorator(func):
3293 3292 if synopsis:
3294 3293 table[name] = func, list(options), synopsis
3295 3294 else:
3296 3295 table[name] = func, list(options)
3297 3296
3298 3297 if norepo:
3299 3298 # Avoid import cycle.
3300 3299 import commands
3301 3300 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3302 3301
3303 3302 if optionalrepo:
3304 3303 import commands
3305 3304 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3306 3305
3307 3306 if inferrepo:
3308 3307 import commands
3309 3308 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3310 3309
3311 3310 return func
3312 3311 return decorator
3313 3312
3314 3313 return cmd
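
# A minimal usage sketch (the command, its option and the table are
# hypothetical):
#
#   table = {}
#   command = command(table)
#
#   @command('hello', [('g', 'greeting', 'Hello', _('greeting to use'))],
#            _('hg hello [-g TEXT]'))
#   def hello(ui, repo, **opts):
#       ui.write('%s\n' % opts['greeting'])
#
# After decoration, table['hello'] holds the (function, options, synopsis)
# entry described above.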
3315 3314
3316 3315 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3317 3316 # commands.outgoing. "missing" is the "missing" attribute of the result
3318 3317 # of "findcommonoutgoing()"
3319 3318 outgoinghooks = util.hooks()
3320 3319
3321 3320 # a list of (ui, repo) functions called by commands.summary
3322 3321 summaryhooks = util.hooks()
3323 3322
3324 3323 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3325 3324 #
3326 3325 # functions should return a tuple of the booleans below, if 'changes' is None:
3327 3326 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3328 3327 #
3329 3328 # otherwise, 'changes' is a tuple of tuples below:
3330 3329 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3331 3330 # - (desturl, destbranch, destpeer, outgoing)
3332 3331 summaryremotehooks = util.hooks()
3333 3332
3334 3333 # A list of state files kept by multistep operations like graft.
3335 3334 # Since graft cannot be aborted, it is considered 'clearable' by update.
3336 3335 # note: bisect is intentionally excluded
3337 3336 # (state file, clearable, allowcommit, error, hint)
3338 3337 unfinishedstates = [
3339 3338 ('graftstate', True, False, _('graft in progress'),
3340 3339 _("use 'hg graft --continue' or 'hg update' to abort")),
3341 3340 ('updatestate', True, False, _('last update was interrupted'),
3342 3341 _("use 'hg update' to get a consistent checkout"))
3343 3342 ]
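
# Extensions providing their own multistep commands can append entries to
# this list; a hypothetical sketch, following the
# (state file, clearable, allowcommit, error, hint) layout documented above:
#
#   unfinishedstates.append(
#       ('mystate', False, False, _('my operation in progress'),
#        _("use 'hg myop --continue' or 'hg myop --abort'")))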
3344 3343
3345 3344 def checkunfinished(repo, commit=False):
3346 3345 '''Look for an unfinished multistep operation, like graft, and abort
3347 3346 if found. It's probably good to check this right before
3348 3347 bailifchanged().
3349 3348 '''
3350 3349 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3351 3350 if commit and allowcommit:
3352 3351 continue
3353 3352 if repo.vfs.exists(f):
3354 3353 raise error.Abort(msg, hint=hint)
3355 3354
3356 3355 def clearunfinished(repo):
3357 3356 '''Check for unfinished operations (as above), and clear the ones
3358 3357 that are clearable.
3359 3358 '''
3360 3359 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3361 3360 if not clearable and repo.vfs.exists(f):
3362 3361 raise error.Abort(msg, hint=hint)
3363 3362 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3364 3363 if clearable and repo.vfs.exists(f):
3365 3364 util.unlink(repo.join(f))
3366 3365
3367 3366 class dirstateguard(object):
3368 3367 '''Restore dirstate on unexpected failure.
3369 3368
3370 3369 At construction, this class:
3371 3370
3372 3371 - writes the current ``repo.dirstate`` out, and
3373 3372 - saves ``.hg/dirstate`` into a backup file
3374 3373
3375 3374 This restores ``.hg/dirstate`` from the backup file, if ``release()``
3376 3375 is invoked before ``close()``.
3377 3376
3378 3377 This just removes the backup file, if ``close()`` is invoked before ``release()``.
3379 3378 '''
3380 3379
3381 3380 def __init__(self, repo, name):
3382 3381 self._repo = repo
3383 3382 self._suffix = '.backup.%s.%d' % (name, id(self))
3384 3383 repo.dirstate._savebackup(repo.currenttransaction(), self._suffix)
3385 3384 self._active = True
3386 3385 self._closed = False
3387 3386
3388 3387 def __del__(self):
3389 3388 if self._active: # still active
3390 3389 # this may occur, even if this class is used correctly:
3391 3390 # for example, releasing other resources like a transaction
3392 3391 # may raise an exception before ``dirstateguard.release`` in
3393 3392 # ``release(tr, ....)``.
3394 3393 self._abort()
3395 3394
3396 3395 def close(self):
3397 3396 if not self._active: # already inactivated
3398 3397 msg = (_("can't close already inactivated backup: dirstate%s")
3399 3398 % self._suffix)
3400 3399 raise error.Abort(msg)
3401 3400
3402 3401 self._repo.dirstate._clearbackup(self._repo.currenttransaction(),
3403 3402 self._suffix)
3404 3403 self._active = False
3405 3404 self._closed = True
3406 3405
3407 3406 def _abort(self):
3408 3407 self._repo.dirstate._restorebackup(self._repo.currenttransaction(),
3409 3408 self._suffix)
3410 3409 self._active = False
3411 3410
3412 3411 def release(self):
3413 3412 if not self._closed:
3414 3413 if not self._active: # already inactivated
3415 3414 msg = (_("can't release already inactivated backup:"
3416 3415 " dirstate%s")
3417 3416 % self._suffix)
3418 3417 raise error.Abort(msg)
3419 3418 self._abort()
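
# A minimal usage sketch (the operation name is illustrative):
#
#   dsguard = dirstateguard(repo, 'my-operation')
#   try:
#       # ... mutate repo.dirstate ...
#       dsguard.close()      # success: discard the .hg/dirstate backup
#   finally:
#       dsguard.release()    # if close() was not reached, restore the backup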