cmdutil: put recordfunc invocation into wlock scope for consistency...
FUJIWARA Katsunori
r25758:c5dfa47a default
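The hunk below keeps the existing recordfunc driver unchanged and wraps its invocation in a new recordinwlock helper, so the whole interactive record operation (filtering, backups, patching, commit and restore) runs while the working-directory lock is held. The pattern, as it appears in the added lines of the diff:

    def recordinwlock(ui, repo, message, match, opts):
        # acquire the working-directory lock for the entire record operation
        wlock = repo.wlock()
        try:
            # delegate to the unchanged interactive record driver
            return recordfunc(ui, repo, message, match, opts)
        finally:
            wlock.release()

    return commit(ui, repo, recordinwlock, pats, opts)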
@@ -1,3322 +1,3329 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO, shutil
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import context, repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import formatter
18 18 import crecord as crecordmod
19 19 import lock as lockmod
20 20
21 21 def ishunk(x):
22 22 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
23 23 return isinstance(x, hunkclasses)
24 24
25 25 def newandmodified(chunks, originalchunks):
26 26 newlyaddedandmodifiedfiles = set()
27 27 for chunk in chunks:
28 28 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
29 29 originalchunks:
30 30 newlyaddedandmodifiedfiles.add(chunk.header.filename())
31 31 return newlyaddedandmodifiedfiles
32 32
33 33 def parsealiases(cmd):
34 34 return cmd.lstrip("^").split("|")
35 35
36 36 def setupwrapcolorwrite(ui):
37 37 # wrap ui.write so diff output can be labeled/colorized
38 38 def wrapwrite(orig, *args, **kw):
39 39 label = kw.pop('label', '')
40 40 for chunk, l in patch.difflabel(lambda: args):
41 41 orig(chunk, label=label + l)
42 42
43 43 oldwrite = ui.write
44 44 def wrap(*args, **kwargs):
45 45 return wrapwrite(oldwrite, *args, **kwargs)
46 46 setattr(ui, 'write', wrap)
47 47 return oldwrite
48 48
49 49 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
50 50 if usecurses:
51 51 if testfile:
52 52 recordfn = crecordmod.testdecorator(testfile,
53 53 crecordmod.testchunkselector)
54 54 else:
55 55 recordfn = crecordmod.chunkselector
56 56
57 57 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
58 58
59 59 else:
60 60 return patch.filterpatch(ui, originalhunks, operation)
61 61
62 62 def recordfilter(ui, originalhunks, operation=None):
63 63 """ Prompts the user to filter the originalhunks and returns a list of
64 64 selected hunks.
65 65 *operation* is used for ui purposes to indicate to the user
66 66 what kind of filtering they are doing: reverting, committing, shelving, etc.
67 67 *operation* has to be a translated string.
68 68 """
69 69 usecurses = ui.configbool('experimental', 'crecord', False)
70 70 testfile = ui.config('experimental', 'crecordtest', None)
71 71 oldwrite = setupwrapcolorwrite(ui)
72 72 try:
73 73 newchunks = filterchunks(ui, originalhunks, usecurses, testfile,
74 74 operation)
75 75 finally:
76 76 ui.write = oldwrite
77 77 return newchunks
78 78
79 79 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
80 80 filterfn, *pats, **opts):
81 81 import merge as mergemod
82 82
83 83 if not ui.interactive():
84 84 raise util.Abort(_('running non-interactively, use %s instead') %
85 85 cmdsuggest)
86 86
87 87 # make sure username is set before going interactive
88 88 if not opts.get('user'):
89 89 ui.username() # raise exception, username not provided
90 90
91 91 def recordfunc(ui, repo, message, match, opts):
92 92 """This is the generic record driver.
93 93
94 94 Its job is to interactively filter local changes, and
95 95 accordingly prepare the working directory into a state in which the
96 96 job can be delegated to a non-interactive commit command such as
97 97 'commit' or 'qrefresh'.
98 98
99 99 After the actual job is done by the non-interactive command, the
100 100 working directory is restored to its original state.
101 101
102 102 In the end we'll record interesting changes, and everything else
103 103 will be left in place, so the user can continue working.
104 104 """
105 105
106 106 checkunfinished(repo, commit=True)
107 107 merge = len(repo[None].parents()) > 1
108 108 if merge:
109 109 raise util.Abort(_('cannot partially commit a merge '
110 110 '(use "hg commit" instead)'))
111 111
112 112 status = repo.status(match=match)
113 113 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
114 114 diffopts.nodates = True
115 115 diffopts.git = True
116 116 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
117 117 originalchunks = patch.parsepatch(originaldiff)
118 118
119 119 # 1. filter patch, so we have an intending-to-apply subset of it
120 120 try:
121 121 chunks = filterfn(ui, originalchunks)
122 122 except patch.PatchError as err:
123 123 raise util.Abort(_('error parsing patch: %s') % err)
124 124
125 125 # We need to keep a backup of files that have been newly added and
126 126 # modified during the recording process because there is a previous
127 127 # version without the edit in the workdir
128 128 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
129 129 contenders = set()
130 130 for h in chunks:
131 131 try:
132 132 contenders.update(set(h.files()))
133 133 except AttributeError:
134 134 pass
135 135
136 136 changed = status.modified + status.added + status.removed
137 137 newfiles = [f for f in changed if f in contenders]
138 138 if not newfiles:
139 139 ui.status(_('no changes to record\n'))
140 140 return 0
141 141
142 142 modified = set(status.modified)
143 143
144 144 # 2. backup changed files, so we can restore them in the end
145 145
146 146 if backupall:
147 147 tobackup = changed
148 148 else:
149 149 tobackup = [f for f in newfiles if f in modified or f in \
150 150 newlyaddedandmodifiedfiles]
151 151 backups = {}
152 152 if tobackup:
153 153 backupdir = repo.join('record-backups')
154 154 try:
155 155 os.mkdir(backupdir)
156 156 except OSError as err:
157 157 if err.errno != errno.EEXIST:
158 158 raise
159 159 try:
160 160 # backup continues
161 161 for f in tobackup:
162 162 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
163 163 dir=backupdir)
164 164 os.close(fd)
165 165 ui.debug('backup %r as %r\n' % (f, tmpname))
166 166 util.copyfile(repo.wjoin(f), tmpname)
167 167 shutil.copystat(repo.wjoin(f), tmpname)
168 168 backups[f] = tmpname
169 169
170 170 fp = cStringIO.StringIO()
171 171 for c in chunks:
172 172 fname = c.filename()
173 173 if fname in backups:
174 174 c.write(fp)
175 175 dopatch = fp.tell()
176 176 fp.seek(0)
177 177
178 178 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
179 179 # 3a. apply filtered patch to clean repo (clean)
180 180 if backups:
181 181 # Equivalent to hg.revert
182 182 choices = lambda key: key in backups
183 183 mergemod.update(repo, repo.dirstate.p1(),
184 184 False, True, choices)
185 185
186 186 # 3b. (apply)
187 187 if dopatch:
188 188 try:
189 189 ui.debug('applying patch\n')
190 190 ui.debug(fp.getvalue())
191 191 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
192 192 except patch.PatchError as err:
193 193 raise util.Abort(str(err))
194 194 del fp
195 195
196 196 # 4. We prepared working directory according to filtered
197 197 # patch. Now is the time to delegate the job to
198 198 # commit/qrefresh or the like!
199 199
200 200 # Make all of the pathnames absolute.
201 201 newfiles = [repo.wjoin(nf) for nf in newfiles]
202 202 return commitfunc(ui, repo, *newfiles, **opts)
203 203 finally:
204 204 # 5. finally restore backed-up files
205 205 try:
206 206 for realname, tmpname in backups.iteritems():
207 207 ui.debug('restoring %r to %r\n' % (tmpname, realname))
208 208 util.copyfile(tmpname, repo.wjoin(realname))
209 209 # Our calls to copystat() here and above are a
210 210 # hack to trick any editors that have f open into
211 211 # thinking we haven't modified it.
212 212 #
213 213 # Also note that this is racy, as an editor could
214 214 # notice the file's mtime before we've finished
215 215 # writing it.
216 216 shutil.copystat(tmpname, repo.wjoin(realname))
217 217 os.unlink(tmpname)
218 218 if tobackup:
219 219 os.rmdir(backupdir)
220 220 except OSError:
221 221 pass
222 222
223 return commit(ui, repo, recordfunc, pats, opts)
223 def recordinwlock(ui, repo, message, match, opts):
224 wlock = repo.wlock()
225 try:
226 return recordfunc(ui, repo, message, match, opts)
227 finally:
228 wlock.release()
229
230 return commit(ui, repo, recordinwlock, pats, opts)
224 231
225 232 def findpossible(cmd, table, strict=False):
226 233 """
227 234 Return cmd -> (aliases, command table entry)
228 235 for each matching command.
229 236 Return debug commands (or their aliases) only if no normal command matches.
230 237 """
231 238 choice = {}
232 239 debugchoice = {}
233 240
234 241 if cmd in table:
235 242 # short-circuit exact matches, "log" alias beats "^log|history"
236 243 keys = [cmd]
237 244 else:
238 245 keys = table.keys()
239 246
240 247 allcmds = []
241 248 for e in keys:
242 249 aliases = parsealiases(e)
243 250 allcmds.extend(aliases)
244 251 found = None
245 252 if cmd in aliases:
246 253 found = cmd
247 254 elif not strict:
248 255 for a in aliases:
249 256 if a.startswith(cmd):
250 257 found = a
251 258 break
252 259 if found is not None:
253 260 if aliases[0].startswith("debug") or found.startswith("debug"):
254 261 debugchoice[found] = (aliases, table[e])
255 262 else:
256 263 choice[found] = (aliases, table[e])
257 264
258 265 if not choice and debugchoice:
259 266 choice = debugchoice
260 267
261 268 return choice, allcmds
262 269
263 270 def findcmd(cmd, table, strict=True):
264 271 """Return (aliases, command table entry) for command string."""
265 272 choice, allcmds = findpossible(cmd, table, strict)
266 273
267 274 if cmd in choice:
268 275 return choice[cmd]
269 276
270 277 if len(choice) > 1:
271 278 clist = choice.keys()
272 279 clist.sort()
273 280 raise error.AmbiguousCommand(cmd, clist)
274 281
275 282 if choice:
276 283 return choice.values()[0]
277 284
278 285 raise error.UnknownCommand(cmd, allcmds)
279 286
280 287 def findrepo(p):
281 288 while not os.path.isdir(os.path.join(p, ".hg")):
282 289 oldp, p = p, os.path.dirname(p)
283 290 if p == oldp:
284 291 return None
285 292
286 293 return p
287 294
288 295 def bailifchanged(repo, merge=True):
289 296 if merge and repo.dirstate.p2() != nullid:
290 297 raise util.Abort(_('outstanding uncommitted merge'))
291 298 modified, added, removed, deleted = repo.status()[:4]
292 299 if modified or added or removed or deleted:
293 300 raise util.Abort(_('uncommitted changes'))
294 301 ctx = repo[None]
295 302 for s in sorted(ctx.substate):
296 303 ctx.sub(s).bailifchanged()
297 304
298 305 def logmessage(ui, opts):
299 306 """ get the log message according to the -m and -l options """
300 307 message = opts.get('message')
301 308 logfile = opts.get('logfile')
302 309
303 310 if message and logfile:
304 311 raise util.Abort(_('options --message and --logfile are mutually '
305 312 'exclusive'))
306 313 if not message and logfile:
307 314 try:
308 315 if logfile == '-':
309 316 message = ui.fin.read()
310 317 else:
311 318 message = '\n'.join(util.readfile(logfile).splitlines())
312 319 except IOError as inst:
313 320 raise util.Abort(_("can't read commit message '%s': %s") %
314 321 (logfile, inst.strerror))
315 322 return message
316 323
317 324 def mergeeditform(ctxorbool, baseformname):
318 325 """return appropriate editform name (referencing a committemplate)
319 326
320 327 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
321 328 a merge is being committed.
322 329
323 330 This returns baseformname with '.merge' appended if it is a merge,
324 331 otherwise '.normal' is appended.
325 332 """
326 333 if isinstance(ctxorbool, bool):
327 334 if ctxorbool:
328 335 return baseformname + ".merge"
329 336 elif 1 < len(ctxorbool.parents()):
330 337 return baseformname + ".merge"
331 338
332 339 return baseformname + ".normal"
333 340
334 341 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
335 342 editform='', **opts):
336 343 """get appropriate commit message editor according to '--edit' option
337 344
338 345 'finishdesc' is a function to be called with the edited commit message
339 346 (= 'description' of the new changeset) just after editing, but
340 347 before checking emptiness. It should return the actual text to be
341 348 stored into history. This allows changing the description before
342 349 storing it.
343 350
344 351 'extramsg' is an extra message to be shown in the editor instead of
345 352 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
346 353 are automatically added.
347 354
348 355 'editform' is a dot-separated list of names, to distinguish
349 356 the purpose of commit text editing.
350 357
351 358 'getcommiteditor' returns 'commitforceeditor' regardless of
352 359 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
353 360 they are specific to usage in MQ.
354 361 """
355 362 if edit or finishdesc or extramsg:
356 363 return lambda r, c, s: commitforceeditor(r, c, s,
357 364 finishdesc=finishdesc,
358 365 extramsg=extramsg,
359 366 editform=editform)
360 367 elif editform:
361 368 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
362 369 else:
363 370 return commiteditor
364 371
365 372 def loglimit(opts):
366 373 """get the log limit according to option -l/--limit"""
367 374 limit = opts.get('limit')
368 375 if limit:
369 376 try:
370 377 limit = int(limit)
371 378 except ValueError:
372 379 raise util.Abort(_('limit must be a positive integer'))
373 380 if limit <= 0:
374 381 raise util.Abort(_('limit must be positive'))
375 382 else:
376 383 limit = None
377 384 return limit
378 385
379 386 def makefilename(repo, pat, node, desc=None,
380 387 total=None, seqno=None, revwidth=None, pathname=None):
381 388 node_expander = {
382 389 'H': lambda: hex(node),
383 390 'R': lambda: str(repo.changelog.rev(node)),
384 391 'h': lambda: short(node),
385 392 'm': lambda: re.sub('[^\w]', '_', str(desc))
386 393 }
387 394 expander = {
388 395 '%': lambda: '%',
389 396 'b': lambda: os.path.basename(repo.root),
390 397 }
391 398
392 399 try:
393 400 if node:
394 401 expander.update(node_expander)
395 402 if node:
396 403 expander['r'] = (lambda:
397 404 str(repo.changelog.rev(node)).zfill(revwidth or 0))
398 405 if total is not None:
399 406 expander['N'] = lambda: str(total)
400 407 if seqno is not None:
401 408 expander['n'] = lambda: str(seqno)
402 409 if total is not None and seqno is not None:
403 410 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
404 411 if pathname is not None:
405 412 expander['s'] = lambda: os.path.basename(pathname)
406 413 expander['d'] = lambda: os.path.dirname(pathname) or '.'
407 414 expander['p'] = lambda: pathname
408 415
409 416 newname = []
410 417 patlen = len(pat)
411 418 i = 0
412 419 while i < patlen:
413 420 c = pat[i]
414 421 if c == '%':
415 422 i += 1
416 423 c = pat[i]
417 424 c = expander[c]()
418 425 newname.append(c)
419 426 i += 1
420 427 return ''.join(newname)
421 428 except KeyError as inst:
422 429 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
423 430 inst.args[0])
424 431
425 432 def makefileobj(repo, pat, node=None, desc=None, total=None,
426 433 seqno=None, revwidth=None, mode='wb', modemap=None,
427 434 pathname=None):
428 435
429 436 writable = mode not in ('r', 'rb')
430 437
431 438 if not pat or pat == '-':
432 439 if writable:
433 440 fp = repo.ui.fout
434 441 else:
435 442 fp = repo.ui.fin
436 443 if util.safehasattr(fp, 'fileno'):
437 444 return os.fdopen(os.dup(fp.fileno()), mode)
438 445 else:
439 446 # if this fp can't be duped properly, return
440 447 # a dummy object that can be closed
441 448 class wrappedfileobj(object):
442 449 noop = lambda x: None
443 450 def __init__(self, f):
444 451 self.f = f
445 452 def __getattr__(self, attr):
446 453 if attr == 'close':
447 454 return self.noop
448 455 else:
449 456 return getattr(self.f, attr)
450 457
451 458 return wrappedfileobj(fp)
452 459 if util.safehasattr(pat, 'write') and writable:
453 460 return pat
454 461 if util.safehasattr(pat, 'read') and 'r' in mode:
455 462 return pat
456 463 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
457 464 if modemap is not None:
458 465 mode = modemap.get(fn, mode)
459 466 if mode == 'wb':
460 467 modemap[fn] = 'ab'
461 468 return open(fn, mode)
462 469
463 470 def openrevlog(repo, cmd, file_, opts):
464 471 """opens the changelog, manifest, a filelog or a given revlog"""
465 472 cl = opts['changelog']
466 473 mf = opts['manifest']
467 474 dir = opts['dir']
468 475 msg = None
469 476 if cl and mf:
470 477 msg = _('cannot specify --changelog and --manifest at the same time')
471 478 elif cl and dir:
472 479 msg = _('cannot specify --changelog and --dir at the same time')
473 480 elif cl or mf:
474 481 if file_:
475 482 msg = _('cannot specify filename with --changelog or --manifest')
476 483 elif not repo:
477 484 msg = _('cannot specify --changelog or --manifest or --dir '
478 485 'without a repository')
479 486 if msg:
480 487 raise util.Abort(msg)
481 488
482 489 r = None
483 490 if repo:
484 491 if cl:
485 492 r = repo.unfiltered().changelog
486 493 elif dir:
487 494 if 'treemanifest' not in repo.requirements:
488 495 raise util.Abort(_("--dir can only be used on repos with "
489 496 "treemanifest enabled"))
490 497 dirlog = repo.dirlog(file_)
491 498 if len(dirlog):
492 499 r = dirlog
493 500 elif mf:
494 501 r = repo.manifest
495 502 elif file_:
496 503 filelog = repo.file(file_)
497 504 if len(filelog):
498 505 r = filelog
499 506 if not r:
500 507 if not file_:
501 508 raise error.CommandError(cmd, _('invalid arguments'))
502 509 if not os.path.isfile(file_):
503 510 raise util.Abort(_("revlog '%s' not found") % file_)
504 511 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
505 512 file_[:-2] + ".i")
506 513 return r
507 514
508 515 def copy(ui, repo, pats, opts, rename=False):
509 516 # called with the repo lock held
510 517 #
511 518 # hgsep => pathname that uses "/" to separate directories
512 519 # ossep => pathname that uses os.sep to separate directories
513 520 cwd = repo.getcwd()
514 521 targets = {}
515 522 after = opts.get("after")
516 523 dryrun = opts.get("dry_run")
517 524 wctx = repo[None]
518 525
519 526 def walkpat(pat):
520 527 srcs = []
521 528 if after:
522 529 badstates = '?'
523 530 else:
524 531 badstates = '?r'
525 532 m = scmutil.match(repo[None], [pat], opts, globbed=True)
526 533 for abs in repo.walk(m):
527 534 state = repo.dirstate[abs]
528 535 rel = m.rel(abs)
529 536 exact = m.exact(abs)
530 537 if state in badstates:
531 538 if exact and state == '?':
532 539 ui.warn(_('%s: not copying - file is not managed\n') % rel)
533 540 if exact and state == 'r':
534 541 ui.warn(_('%s: not copying - file has been marked for'
535 542 ' remove\n') % rel)
536 543 continue
537 544 # abs: hgsep
538 545 # rel: ossep
539 546 srcs.append((abs, rel, exact))
540 547 return srcs
541 548
542 549 # abssrc: hgsep
543 550 # relsrc: ossep
544 551 # otarget: ossep
545 552 def copyfile(abssrc, relsrc, otarget, exact):
546 553 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
547 554 if '/' in abstarget:
548 555 # We cannot normalize abstarget itself, this would prevent
549 556 # case-only renames, like a => A.
550 557 abspath, absname = abstarget.rsplit('/', 1)
551 558 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
552 559 reltarget = repo.pathto(abstarget, cwd)
553 560 target = repo.wjoin(abstarget)
554 561 src = repo.wjoin(abssrc)
555 562 state = repo.dirstate[abstarget]
556 563
557 564 scmutil.checkportable(ui, abstarget)
558 565
559 566 # check for collisions
560 567 prevsrc = targets.get(abstarget)
561 568 if prevsrc is not None:
562 569 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
563 570 (reltarget, repo.pathto(abssrc, cwd),
564 571 repo.pathto(prevsrc, cwd)))
565 572 return
566 573
567 574 # check for overwrites
568 575 exists = os.path.lexists(target)
569 576 samefile = False
570 577 if exists and abssrc != abstarget:
571 578 if (repo.dirstate.normalize(abssrc) ==
572 579 repo.dirstate.normalize(abstarget)):
573 580 if not rename:
574 581 ui.warn(_("%s: can't copy - same file\n") % reltarget)
575 582 return
576 583 exists = False
577 584 samefile = True
578 585
579 586 if not after and exists or after and state in 'mn':
580 587 if not opts['force']:
581 588 ui.warn(_('%s: not overwriting - file exists\n') %
582 589 reltarget)
583 590 return
584 591
585 592 if after:
586 593 if not exists:
587 594 if rename:
588 595 ui.warn(_('%s: not recording move - %s does not exist\n') %
589 596 (relsrc, reltarget))
590 597 else:
591 598 ui.warn(_('%s: not recording copy - %s does not exist\n') %
592 599 (relsrc, reltarget))
593 600 return
594 601 elif not dryrun:
595 602 try:
596 603 if exists:
597 604 os.unlink(target)
598 605 targetdir = os.path.dirname(target) or '.'
599 606 if not os.path.isdir(targetdir):
600 607 os.makedirs(targetdir)
601 608 if samefile:
602 609 tmp = target + "~hgrename"
603 610 os.rename(src, tmp)
604 611 os.rename(tmp, target)
605 612 else:
606 613 util.copyfile(src, target)
607 614 srcexists = True
608 615 except IOError as inst:
609 616 if inst.errno == errno.ENOENT:
610 617 ui.warn(_('%s: deleted in working directory\n') % relsrc)
611 618 srcexists = False
612 619 else:
613 620 ui.warn(_('%s: cannot copy - %s\n') %
614 621 (relsrc, inst.strerror))
615 622 return True # report a failure
616 623
617 624 if ui.verbose or not exact:
618 625 if rename:
619 626 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
620 627 else:
621 628 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
622 629
623 630 targets[abstarget] = abssrc
624 631
625 632 # fix up dirstate
626 633 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
627 634 dryrun=dryrun, cwd=cwd)
628 635 if rename and not dryrun:
629 636 if not after and srcexists and not samefile:
630 637 util.unlinkpath(repo.wjoin(abssrc))
631 638 wctx.forget([abssrc])
632 639
633 640 # pat: ossep
634 641 # dest ossep
635 642 # srcs: list of (hgsep, hgsep, ossep, bool)
636 643 # return: function that takes hgsep and returns ossep
637 644 def targetpathfn(pat, dest, srcs):
638 645 if os.path.isdir(pat):
639 646 abspfx = pathutil.canonpath(repo.root, cwd, pat)
640 647 abspfx = util.localpath(abspfx)
641 648 if destdirexists:
642 649 striplen = len(os.path.split(abspfx)[0])
643 650 else:
644 651 striplen = len(abspfx)
645 652 if striplen:
646 653 striplen += len(os.sep)
647 654 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
648 655 elif destdirexists:
649 656 res = lambda p: os.path.join(dest,
650 657 os.path.basename(util.localpath(p)))
651 658 else:
652 659 res = lambda p: dest
653 660 return res
654 661
655 662 # pat: ossep
656 663 # dest ossep
657 664 # srcs: list of (hgsep, hgsep, ossep, bool)
658 665 # return: function that takes hgsep and returns ossep
659 666 def targetpathafterfn(pat, dest, srcs):
660 667 if matchmod.patkind(pat):
661 668 # a mercurial pattern
662 669 res = lambda p: os.path.join(dest,
663 670 os.path.basename(util.localpath(p)))
664 671 else:
665 672 abspfx = pathutil.canonpath(repo.root, cwd, pat)
666 673 if len(abspfx) < len(srcs[0][0]):
667 674 # A directory. Either the target path contains the last
668 675 # component of the source path or it does not.
669 676 def evalpath(striplen):
670 677 score = 0
671 678 for s in srcs:
672 679 t = os.path.join(dest, util.localpath(s[0])[striplen:])
673 680 if os.path.lexists(t):
674 681 score += 1
675 682 return score
676 683
677 684 abspfx = util.localpath(abspfx)
678 685 striplen = len(abspfx)
679 686 if striplen:
680 687 striplen += len(os.sep)
681 688 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
682 689 score = evalpath(striplen)
683 690 striplen1 = len(os.path.split(abspfx)[0])
684 691 if striplen1:
685 692 striplen1 += len(os.sep)
686 693 if evalpath(striplen1) > score:
687 694 striplen = striplen1
688 695 res = lambda p: os.path.join(dest,
689 696 util.localpath(p)[striplen:])
690 697 else:
691 698 # a file
692 699 if destdirexists:
693 700 res = lambda p: os.path.join(dest,
694 701 os.path.basename(util.localpath(p)))
695 702 else:
696 703 res = lambda p: dest
697 704 return res
698 705
699 706 pats = scmutil.expandpats(pats)
700 707 if not pats:
701 708 raise util.Abort(_('no source or destination specified'))
702 709 if len(pats) == 1:
703 710 raise util.Abort(_('no destination specified'))
704 711 dest = pats.pop()
705 712 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
706 713 if not destdirexists:
707 714 if len(pats) > 1 or matchmod.patkind(pats[0]):
708 715 raise util.Abort(_('with multiple sources, destination must be an '
709 716 'existing directory'))
710 717 if util.endswithsep(dest):
711 718 raise util.Abort(_('destination %s is not a directory') % dest)
712 719
713 720 tfn = targetpathfn
714 721 if after:
715 722 tfn = targetpathafterfn
716 723 copylist = []
717 724 for pat in pats:
718 725 srcs = walkpat(pat)
719 726 if not srcs:
720 727 continue
721 728 copylist.append((tfn(pat, dest, srcs), srcs))
722 729 if not copylist:
723 730 raise util.Abort(_('no files to copy'))
724 731
725 732 errors = 0
726 733 for targetpath, srcs in copylist:
727 734 for abssrc, relsrc, exact in srcs:
728 735 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
729 736 errors += 1
730 737
731 738 if errors:
732 739 ui.warn(_('(consider using --after)\n'))
733 740
734 741 return errors != 0
735 742
736 743 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
737 744 runargs=None, appendpid=False):
738 745 '''Run a command as a service.'''
739 746
740 747 def writepid(pid):
741 748 if opts['pid_file']:
742 749 if appendpid:
743 750 mode = 'a'
744 751 else:
745 752 mode = 'w'
746 753 fp = open(opts['pid_file'], mode)
747 754 fp.write(str(pid) + '\n')
748 755 fp.close()
749 756
750 757 if opts['daemon'] and not opts['daemon_pipefds']:
751 758 # Signal child process startup with file removal
752 759 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
753 760 os.close(lockfd)
754 761 try:
755 762 if not runargs:
756 763 runargs = util.hgcmd() + sys.argv[1:]
757 764 runargs.append('--daemon-pipefds=%s' % lockpath)
758 765 # Don't pass --cwd to the child process, because we've already
759 766 # changed directory.
760 767 for i in xrange(1, len(runargs)):
761 768 if runargs[i].startswith('--cwd='):
762 769 del runargs[i]
763 770 break
764 771 elif runargs[i].startswith('--cwd'):
765 772 del runargs[i:i + 2]
766 773 break
767 774 def condfn():
768 775 return not os.path.exists(lockpath)
769 776 pid = util.rundetached(runargs, condfn)
770 777 if pid < 0:
771 778 raise util.Abort(_('child process failed to start'))
772 779 writepid(pid)
773 780 finally:
774 781 try:
775 782 os.unlink(lockpath)
776 783 except OSError as e:
777 784 if e.errno != errno.ENOENT:
778 785 raise
779 786 if parentfn:
780 787 return parentfn(pid)
781 788 else:
782 789 return
783 790
784 791 if initfn:
785 792 initfn()
786 793
787 794 if not opts['daemon']:
788 795 writepid(os.getpid())
789 796
790 797 if opts['daemon_pipefds']:
791 798 lockpath = opts['daemon_pipefds']
792 799 try:
793 800 os.setsid()
794 801 except AttributeError:
795 802 pass
796 803 os.unlink(lockpath)
797 804 util.hidewindow()
798 805 sys.stdout.flush()
799 806 sys.stderr.flush()
800 807
801 808 nullfd = os.open(os.devnull, os.O_RDWR)
802 809 logfilefd = nullfd
803 810 if logfile:
804 811 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
805 812 os.dup2(nullfd, 0)
806 813 os.dup2(logfilefd, 1)
807 814 os.dup2(logfilefd, 2)
808 815 if nullfd not in (0, 1, 2):
809 816 os.close(nullfd)
810 817 if logfile and logfilefd not in (0, 1, 2):
811 818 os.close(logfilefd)
812 819
813 820 if runfn:
814 821 return runfn()
815 822
816 823 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
817 824 """Utility function used by commands.import to import a single patch
818 825
819 826 This function is explicitly defined here to help the evolve extension to
820 827 wrap this part of the import logic.
821 828
822 829 The API is currently a bit ugly because it is a simple code translation from
823 830 the import command. Feel free to make it better.
824 831
825 832 :hunk: a patch (as a binary string)
826 833 :parents: nodes that will be parent of the created commit
827 834 :opts: the full dict of options passed to the import command
828 835 :msgs: list to save commit message to.
829 836 (used in case we need to save it when failing)
830 837 :updatefunc: a function that updates a repo to a given node
831 838 updatefunc(<repo>, <node>)
832 839 """
833 840 tmpname, message, user, date, branch, nodeid, p1, p2 = \
834 841 patch.extract(ui, hunk)
835 842
836 843 update = not opts.get('bypass')
837 844 strip = opts["strip"]
838 845 prefix = opts["prefix"]
839 846 sim = float(opts.get('similarity') or 0)
840 847 if not tmpname:
841 848 return (None, None, False)
842 849 msg = _('applied to working directory')
843 850
844 851 rejects = False
845 852 dsguard = None
846 853
847 854 try:
848 855 cmdline_message = logmessage(ui, opts)
849 856 if cmdline_message:
850 857 # pickup the cmdline msg
851 858 message = cmdline_message
852 859 elif message:
853 860 # pickup the patch msg
854 861 message = message.strip()
855 862 else:
856 863 # launch the editor
857 864 message = None
858 865 ui.debug('message:\n%s\n' % message)
859 866
860 867 if len(parents) == 1:
861 868 parents.append(repo[nullid])
862 869 if opts.get('exact'):
863 870 if not nodeid or not p1:
864 871 raise util.Abort(_('not a Mercurial patch'))
865 872 p1 = repo[p1]
866 873 p2 = repo[p2 or nullid]
867 874 elif p2:
868 875 try:
869 876 p1 = repo[p1]
870 877 p2 = repo[p2]
871 878 # Without any options, consider p2 only if the
872 879 # patch is being applied on top of the recorded
873 880 # first parent.
874 881 if p1 != parents[0]:
875 882 p1 = parents[0]
876 883 p2 = repo[nullid]
877 884 except error.RepoError:
878 885 p1, p2 = parents
879 886 if p2.node() == nullid:
880 887 ui.warn(_("warning: import the patch as a normal revision\n"
881 888 "(use --exact to import the patch as a merge)\n"))
882 889 else:
883 890 p1, p2 = parents
884 891
885 892 n = None
886 893 if update:
887 894 dsguard = dirstateguard(repo, 'tryimportone')
888 895 if p1 != parents[0]:
889 896 updatefunc(repo, p1.node())
890 897 if p2 != parents[1]:
891 898 repo.setparents(p1.node(), p2.node())
892 899
893 900 if opts.get('exact') or opts.get('import_branch'):
894 901 repo.dirstate.setbranch(branch or 'default')
895 902
896 903 partial = opts.get('partial', False)
897 904 files = set()
898 905 try:
899 906 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
900 907 files=files, eolmode=None, similarity=sim / 100.0)
901 908 except patch.PatchError as e:
902 909 if not partial:
903 910 raise util.Abort(str(e))
904 911 if partial:
905 912 rejects = True
906 913
907 914 files = list(files)
908 915 if opts.get('no_commit'):
909 916 if message:
910 917 msgs.append(message)
911 918 else:
912 919 if opts.get('exact') or p2:
913 920 # If you got here, you either used --force and know what
914 921 # you are doing or used --exact or a merge patch while
915 922 # being updated to its first parent.
916 923 m = None
917 924 else:
918 925 m = scmutil.matchfiles(repo, files or [])
919 926 editform = mergeeditform(repo[None], 'import.normal')
920 927 if opts.get('exact'):
921 928 editor = None
922 929 else:
923 930 editor = getcommiteditor(editform=editform, **opts)
924 931 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
925 932 try:
926 933 if partial:
927 934 repo.ui.setconfig('ui', 'allowemptycommit', True)
928 935 n = repo.commit(message, opts.get('user') or user,
929 936 opts.get('date') or date, match=m,
930 937 editor=editor)
931 938 finally:
932 939 repo.ui.restoreconfig(allowemptyback)
933 940 dsguard.close()
934 941 else:
935 942 if opts.get('exact') or opts.get('import_branch'):
936 943 branch = branch or 'default'
937 944 else:
938 945 branch = p1.branch()
939 946 store = patch.filestore()
940 947 try:
941 948 files = set()
942 949 try:
943 950 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
944 951 files, eolmode=None)
945 952 except patch.PatchError as e:
946 953 raise util.Abort(str(e))
947 954 if opts.get('exact'):
948 955 editor = None
949 956 else:
950 957 editor = getcommiteditor(editform='import.bypass')
951 958 memctx = context.makememctx(repo, (p1.node(), p2.node()),
952 959 message,
953 960 opts.get('user') or user,
954 961 opts.get('date') or date,
955 962 branch, files, store,
956 963 editor=editor)
957 964 n = memctx.commit()
958 965 finally:
959 966 store.close()
960 967 if opts.get('exact') and opts.get('no_commit'):
961 968 # --exact with --no-commit is still useful in that it does merge
962 969 # and branch bits
963 970 ui.warn(_("warning: can't check exact import with --no-commit\n"))
964 971 elif opts.get('exact') and hex(n) != nodeid:
965 972 raise util.Abort(_('patch is damaged or loses information'))
966 973 if n:
967 974 # i18n: refers to a short changeset id
968 975 msg = _('created %s') % short(n)
969 976 return (msg, n, rejects)
970 977 finally:
971 978 lockmod.release(dsguard)
972 979 os.unlink(tmpname)
973 980
974 981 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
975 982 opts=None):
976 983 '''export changesets as hg patches.'''
977 984
978 985 total = len(revs)
979 986 revwidth = max([len(str(rev)) for rev in revs])
980 987 filemode = {}
981 988
982 989 def single(rev, seqno, fp):
983 990 ctx = repo[rev]
984 991 node = ctx.node()
985 992 parents = [p.node() for p in ctx.parents() if p]
986 993 branch = ctx.branch()
987 994 if switch_parent:
988 995 parents.reverse()
989 996
990 997 if parents:
991 998 prev = parents[0]
992 999 else:
993 1000 prev = nullid
994 1001
995 1002 shouldclose = False
996 1003 if not fp and len(template) > 0:
997 1004 desc_lines = ctx.description().rstrip().split('\n')
998 1005 desc = desc_lines[0] # Commit always has a first line.
999 1006 fp = makefileobj(repo, template, node, desc=desc, total=total,
1000 1007 seqno=seqno, revwidth=revwidth, mode='wb',
1001 1008 modemap=filemode)
1002 1009 if fp != template:
1003 1010 shouldclose = True
1004 1011 if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
1005 1012 repo.ui.note("%s\n" % fp.name)
1006 1013
1007 1014 if not fp:
1008 1015 write = repo.ui.write
1009 1016 else:
1010 1017 def write(s, **kw):
1011 1018 fp.write(s)
1012 1019
1013 1020 write("# HG changeset patch\n")
1014 1021 write("# User %s\n" % ctx.user())
1015 1022 write("# Date %d %d\n" % ctx.date())
1016 1023 write("# %s\n" % util.datestr(ctx.date()))
1017 1024 if branch and branch != 'default':
1018 1025 write("# Branch %s\n" % branch)
1019 1026 write("# Node ID %s\n" % hex(node))
1020 1027 write("# Parent %s\n" % hex(prev))
1021 1028 if len(parents) > 1:
1022 1029 write("# Parent %s\n" % hex(parents[1]))
1023 1030 write(ctx.description().rstrip())
1024 1031 write("\n\n")
1025 1032
1026 1033 for chunk, label in patch.diffui(repo, prev, node, opts=opts):
1027 1034 write(chunk, label=label)
1028 1035
1029 1036 if shouldclose:
1030 1037 fp.close()
1031 1038
1032 1039 for seqno, rev in enumerate(revs):
1033 1040 single(rev, seqno + 1, fp)
1034 1041
1035 1042 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1036 1043 changes=None, stat=False, fp=None, prefix='',
1037 1044 root='', listsubrepos=False):
1038 1045 '''show diff or diffstat.'''
1039 1046 if fp is None:
1040 1047 write = ui.write
1041 1048 else:
1042 1049 def write(s, **kw):
1043 1050 fp.write(s)
1044 1051
1045 1052 if root:
1046 1053 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1047 1054 else:
1048 1055 relroot = ''
1049 1056 if relroot != '':
1050 1057 # XXX relative roots currently don't work if the root is within a
1051 1058 # subrepo
1052 1059 uirelroot = match.uipath(relroot)
1053 1060 relroot += '/'
1054 1061 for matchroot in match.files():
1055 1062 if not matchroot.startswith(relroot):
1056 1063 ui.warn(_('warning: %s not inside relative root %s\n') % (
1057 1064 match.uipath(matchroot), uirelroot))
1058 1065
1059 1066 if stat:
1060 1067 diffopts = diffopts.copy(context=0)
1061 1068 width = 80
1062 1069 if not ui.plain():
1063 1070 width = ui.termwidth()
1064 1071 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1065 1072 prefix=prefix, relroot=relroot)
1066 1073 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1067 1074 width=width,
1068 1075 git=diffopts.git):
1069 1076 write(chunk, label=label)
1070 1077 else:
1071 1078 for chunk, label in patch.diffui(repo, node1, node2, match,
1072 1079 changes, diffopts, prefix=prefix,
1073 1080 relroot=relroot):
1074 1081 write(chunk, label=label)
1075 1082
1076 1083 if listsubrepos:
1077 1084 ctx1 = repo[node1]
1078 1085 ctx2 = repo[node2]
1079 1086 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1080 1087 tempnode2 = node2
1081 1088 try:
1082 1089 if node2 is not None:
1083 1090 tempnode2 = ctx2.substate[subpath][1]
1084 1091 except KeyError:
1085 1092 # A subrepo that existed in node1 was deleted between node1 and
1086 1093 # node2 (inclusive). Thus, ctx2's substate won't contain that
1087 1094 # subpath. The best we can do is to ignore it.
1088 1095 tempnode2 = None
1089 1096 submatch = matchmod.narrowmatcher(subpath, match)
1090 1097 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1091 1098 stat=stat, fp=fp, prefix=prefix)
1092 1099
1093 1100 class changeset_printer(object):
1094 1101 '''show changeset information when templating is not requested.'''
1095 1102
1096 1103 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1097 1104 self.ui = ui
1098 1105 self.repo = repo
1099 1106 self.buffered = buffered
1100 1107 self.matchfn = matchfn
1101 1108 self.diffopts = diffopts
1102 1109 self.header = {}
1103 1110 self.hunk = {}
1104 1111 self.lastheader = None
1105 1112 self.footer = None
1106 1113
1107 1114 def flush(self, rev):
1108 1115 if rev in self.header:
1109 1116 h = self.header[rev]
1110 1117 if h != self.lastheader:
1111 1118 self.lastheader = h
1112 1119 self.ui.write(h)
1113 1120 del self.header[rev]
1114 1121 if rev in self.hunk:
1115 1122 self.ui.write(self.hunk[rev])
1116 1123 del self.hunk[rev]
1117 1124 return 1
1118 1125 return 0
1119 1126
1120 1127 def close(self):
1121 1128 if self.footer:
1122 1129 self.ui.write(self.footer)
1123 1130
1124 1131 def show(self, ctx, copies=None, matchfn=None, **props):
1125 1132 if self.buffered:
1126 1133 self.ui.pushbuffer()
1127 1134 self._show(ctx, copies, matchfn, props)
1128 1135 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
1129 1136 else:
1130 1137 self._show(ctx, copies, matchfn, props)
1131 1138
1132 1139 def _show(self, ctx, copies, matchfn, props):
1133 1140 '''show a single changeset or file revision'''
1134 1141 changenode = ctx.node()
1135 1142 rev = ctx.rev()
1136 1143 if self.ui.debugflag:
1137 1144 hexfunc = hex
1138 1145 else:
1139 1146 hexfunc = short
1140 1147 if rev is None:
1141 1148 pctx = ctx.p1()
1142 1149 revnode = (pctx.rev(), hexfunc(pctx.node()) + '+')
1143 1150 else:
1144 1151 revnode = (rev, hexfunc(changenode))
1145 1152
1146 1153 if self.ui.quiet:
1147 1154 self.ui.write("%d:%s\n" % revnode, label='log.node')
1148 1155 return
1149 1156
1150 1157 date = util.datestr(ctx.date())
1151 1158
1152 1159 # i18n: column positioning for "hg log"
1153 1160 self.ui.write(_("changeset: %d:%s\n") % revnode,
1154 1161 label='log.changeset changeset.%s' % ctx.phasestr())
1155 1162
1156 1163 # branches are shown first before any other names due to backwards
1157 1164 # compatibility
1158 1165 branch = ctx.branch()
1159 1166 # don't show the default branch name
1160 1167 if branch != 'default':
1161 1168 # i18n: column positioning for "hg log"
1162 1169 self.ui.write(_("branch: %s\n") % branch,
1163 1170 label='log.branch')
1164 1171
1165 1172 for name, ns in self.repo.names.iteritems():
1166 1173 # branches has special logic already handled above, so here we just
1167 1174 # skip it
1168 1175 if name == 'branches':
1169 1176 continue
1170 1177 # we will use the templatename as the color name since those two
1171 1178 # should be the same
1172 1179 for name in ns.names(self.repo, changenode):
1173 1180 self.ui.write(ns.logfmt % name,
1174 1181 label='log.%s' % ns.colorname)
1175 1182 if self.ui.debugflag:
1176 1183 # i18n: column positioning for "hg log"
1177 1184 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1178 1185 label='log.phase')
1179 1186 for pctx in self._meaningful_parentrevs(ctx):
1180 1187 label = 'log.parent changeset.%s' % pctx.phasestr()
1181 1188 # i18n: column positioning for "hg log"
1182 1189 self.ui.write(_("parent: %d:%s\n")
1183 1190 % (pctx.rev(), hexfunc(pctx.node())),
1184 1191 label=label)
1185 1192
1186 1193 if self.ui.debugflag and rev is not None:
1187 1194 mnode = ctx.manifestnode()
1188 1195 # i18n: column positioning for "hg log"
1189 1196 self.ui.write(_("manifest: %d:%s\n") %
1190 1197 (self.repo.manifest.rev(mnode), hex(mnode)),
1191 1198 label='ui.debug log.manifest')
1192 1199 # i18n: column positioning for "hg log"
1193 1200 self.ui.write(_("user: %s\n") % ctx.user(),
1194 1201 label='log.user')
1195 1202 # i18n: column positioning for "hg log"
1196 1203 self.ui.write(_("date: %s\n") % date,
1197 1204 label='log.date')
1198 1205
1199 1206 if self.ui.debugflag:
1200 1207 files = ctx.p1().status(ctx)[:3]
1201 1208 for key, value in zip([# i18n: column positioning for "hg log"
1202 1209 _("files:"),
1203 1210 # i18n: column positioning for "hg log"
1204 1211 _("files+:"),
1205 1212 # i18n: column positioning for "hg log"
1206 1213 _("files-:")], files):
1207 1214 if value:
1208 1215 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1209 1216 label='ui.debug log.files')
1210 1217 elif ctx.files() and self.ui.verbose:
1211 1218 # i18n: column positioning for "hg log"
1212 1219 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1213 1220 label='ui.note log.files')
1214 1221 if copies and self.ui.verbose:
1215 1222 copies = ['%s (%s)' % c for c in copies]
1216 1223 # i18n: column positioning for "hg log"
1217 1224 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1218 1225 label='ui.note log.copies')
1219 1226
1220 1227 extra = ctx.extra()
1221 1228 if extra and self.ui.debugflag:
1222 1229 for key, value in sorted(extra.items()):
1223 1230 # i18n: column positioning for "hg log"
1224 1231 self.ui.write(_("extra: %s=%s\n")
1225 1232 % (key, value.encode('string_escape')),
1226 1233 label='ui.debug log.extra')
1227 1234
1228 1235 description = ctx.description().strip()
1229 1236 if description:
1230 1237 if self.ui.verbose:
1231 1238 self.ui.write(_("description:\n"),
1232 1239 label='ui.note log.description')
1233 1240 self.ui.write(description,
1234 1241 label='ui.note log.description')
1235 1242 self.ui.write("\n\n")
1236 1243 else:
1237 1244 # i18n: column positioning for "hg log"
1238 1245 self.ui.write(_("summary: %s\n") %
1239 1246 description.splitlines()[0],
1240 1247 label='log.summary')
1241 1248 self.ui.write("\n")
1242 1249
1243 1250 self.showpatch(changenode, matchfn)
1244 1251
1245 1252 def showpatch(self, node, matchfn):
1246 1253 if not matchfn:
1247 1254 matchfn = self.matchfn
1248 1255 if matchfn:
1249 1256 stat = self.diffopts.get('stat')
1250 1257 diff = self.diffopts.get('patch')
1251 1258 diffopts = patch.diffallopts(self.ui, self.diffopts)
1252 1259 prev = self.repo.changelog.parents(node)[0]
1253 1260 if stat:
1254 1261 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1255 1262 match=matchfn, stat=True)
1256 1263 if diff:
1257 1264 if stat:
1258 1265 self.ui.write("\n")
1259 1266 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1260 1267 match=matchfn, stat=False)
1261 1268 self.ui.write("\n")
1262 1269
1263 1270 def _meaningful_parentrevs(self, ctx):
1264 1271 """Return list of meaningful (or all if debug) parentrevs for rev.
1265 1272
1266 1273 For merges (two non-nullrev revisions) both parents are meaningful.
1267 1274 Otherwise the first parent revision is considered meaningful if it
1268 1275 is not the preceding revision.
1269 1276 """
1270 1277 parents = ctx.parents()
1271 1278 if len(parents) > 1:
1272 1279 return parents
1273 1280 if self.ui.debugflag:
1274 1281 return [parents[0], self.repo['null']]
1275 1282 if parents[0].rev() >= scmutil.intrev(ctx.rev()) - 1:
1276 1283 return []
1277 1284 return parents
1278 1285
1279 1286 class jsonchangeset(changeset_printer):
1280 1287 '''format changeset information.'''
1281 1288
1282 1289 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1283 1290 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1284 1291 self.cache = {}
1285 1292 self._first = True
1286 1293
1287 1294 def close(self):
1288 1295 if not self._first:
1289 1296 self.ui.write("\n]\n")
1290 1297 else:
1291 1298 self.ui.write("[]\n")
1292 1299
1293 1300 def _show(self, ctx, copies, matchfn, props):
1294 1301 '''show a single changeset or file revision'''
1295 1302 rev = ctx.rev()
1296 1303 if rev is None:
1297 1304 jrev = jnode = 'null'
1298 1305 else:
1299 1306 jrev = str(rev)
1300 1307 jnode = '"%s"' % hex(ctx.node())
1301 1308 j = encoding.jsonescape
1302 1309
1303 1310 if self._first:
1304 1311 self.ui.write("[\n {")
1305 1312 self._first = False
1306 1313 else:
1307 1314 self.ui.write(",\n {")
1308 1315
1309 1316 if self.ui.quiet:
1310 1317 self.ui.write('\n "rev": %s' % jrev)
1311 1318 self.ui.write(',\n "node": %s' % jnode)
1312 1319 self.ui.write('\n }')
1313 1320 return
1314 1321
1315 1322 self.ui.write('\n "rev": %s' % jrev)
1316 1323 self.ui.write(',\n "node": %s' % jnode)
1317 1324 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1318 1325 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1319 1326 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1320 1327 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1321 1328 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1322 1329
1323 1330 self.ui.write(',\n "bookmarks": [%s]' %
1324 1331 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1325 1332 self.ui.write(',\n "tags": [%s]' %
1326 1333 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1327 1334 self.ui.write(',\n "parents": [%s]' %
1328 1335 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1329 1336
1330 1337 if self.ui.debugflag:
1331 1338 if rev is None:
1332 1339 jmanifestnode = 'null'
1333 1340 else:
1334 1341 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1335 1342 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1336 1343
1337 1344 self.ui.write(',\n "extra": {%s}' %
1338 1345 ", ".join('"%s": "%s"' % (j(k), j(v))
1339 1346 for k, v in ctx.extra().items()))
1340 1347
1341 1348 files = ctx.p1().status(ctx)
1342 1349 self.ui.write(',\n "modified": [%s]' %
1343 1350 ", ".join('"%s"' % j(f) for f in files[0]))
1344 1351 self.ui.write(',\n "added": [%s]' %
1345 1352 ", ".join('"%s"' % j(f) for f in files[1]))
1346 1353 self.ui.write(',\n "removed": [%s]' %
1347 1354 ", ".join('"%s"' % j(f) for f in files[2]))
1348 1355
1349 1356 elif self.ui.verbose:
1350 1357 self.ui.write(',\n "files": [%s]' %
1351 1358 ", ".join('"%s"' % j(f) for f in ctx.files()))
1352 1359
1353 1360 if copies:
1354 1361 self.ui.write(',\n "copies": {%s}' %
1355 1362 ", ".join('"%s": "%s"' % (j(k), j(v))
1356 1363 for k, v in copies))
1357 1364
1358 1365 matchfn = self.matchfn
1359 1366 if matchfn:
1360 1367 stat = self.diffopts.get('stat')
1361 1368 diff = self.diffopts.get('patch')
1362 1369 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1363 1370 node, prev = ctx.node(), ctx.p1().node()
1364 1371 if stat:
1365 1372 self.ui.pushbuffer()
1366 1373 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1367 1374 match=matchfn, stat=True)
1368 1375 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1369 1376 if diff:
1370 1377 self.ui.pushbuffer()
1371 1378 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1372 1379 match=matchfn, stat=False)
1373 1380 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1374 1381
1375 1382 self.ui.write("\n }")
1376 1383
1377 1384 class changeset_templater(changeset_printer):
1378 1385 '''format changeset information.'''
1379 1386
1380 1387 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1381 1388 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1382 1389 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1383 1390 defaulttempl = {
1384 1391 'parent': '{rev}:{node|formatnode} ',
1385 1392 'manifest': '{rev}:{node|formatnode}',
1386 1393 'file_copy': '{name} ({source})',
1387 1394 'extra': '{key}={value|stringescape}'
1388 1395 }
1389 1396 # filecopy is preserved for compatibility reasons
1390 1397 defaulttempl['filecopy'] = defaulttempl['file_copy']
1391 1398 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1392 1399 cache=defaulttempl)
1393 1400 if tmpl:
1394 1401 self.t.cache['changeset'] = tmpl
1395 1402
1396 1403 self.cache = {}
1397 1404
1398 1405 def _show(self, ctx, copies, matchfn, props):
1399 1406 '''show a single changeset or file revision'''
1400 1407
1401 1408 showlist = templatekw.showlist
1402 1409
1403 1410 # showparents() behaviour depends on ui trace level, which
1404 1411 # causes unexpected behaviour at the templating level and makes
1405 1412 # it harder to extract into a standalone function. Its
1406 1413 # behaviour cannot be changed, so leave it here for now.
1407 1414 def showparents(**args):
1408 1415 ctx = args['ctx']
1409 1416 parents = [[('rev', p.rev()),
1410 1417 ('node', p.hex()),
1411 1418 ('phase', p.phasestr())]
1412 1419 for p in self._meaningful_parentrevs(ctx)]
1413 1420 return showlist('parent', parents, **args)
1414 1421
1415 1422 props = props.copy()
1416 1423 props.update(templatekw.keywords)
1417 1424 props['parents'] = showparents
1418 1425 props['templ'] = self.t
1419 1426 props['ctx'] = ctx
1420 1427 props['repo'] = self.repo
1421 1428 props['revcache'] = {'copies': copies}
1422 1429 props['cache'] = self.cache
1423 1430
1424 1431 # find correct templates for current mode
1425 1432
1426 1433 tmplmodes = [
1427 1434 (True, None),
1428 1435 (self.ui.verbose, 'verbose'),
1429 1436 (self.ui.quiet, 'quiet'),
1430 1437 (self.ui.debugflag, 'debug'),
1431 1438 ]
1432 1439
1433 1440 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
1434 1441 for mode, postfix in tmplmodes:
1435 1442 for type in types:
1436 1443 cur = postfix and ('%s_%s' % (type, postfix)) or type
1437 1444 if mode and cur in self.t:
1438 1445 types[type] = cur
1439 1446
1440 1447 try:
1441 1448
1442 1449 # write header
1443 1450 if types['header']:
1444 1451 h = templater.stringify(self.t(types['header'], **props))
1445 1452 if self.buffered:
1446 1453 self.header[ctx.rev()] = h
1447 1454 else:
1448 1455 if self.lastheader != h:
1449 1456 self.lastheader = h
1450 1457 self.ui.write(h)
1451 1458
1452 1459 # write changeset metadata, then patch if requested
1453 1460 key = types['changeset']
1454 1461 self.ui.write(templater.stringify(self.t(key, **props)))
1455 1462 self.showpatch(ctx.node(), matchfn)
1456 1463
1457 1464 if types['footer']:
1458 1465 if not self.footer:
1459 1466 self.footer = templater.stringify(self.t(types['footer'],
1460 1467 **props))
1461 1468
1462 1469 except KeyError as inst:
1463 1470 msg = _("%s: no key named '%s'")
1464 1471 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1465 1472 except SyntaxError as inst:
1466 1473 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1467 1474
1468 1475 def gettemplate(ui, tmpl, style):
1469 1476 """
1470 1477 Find the template matching the given template spec or style.
1471 1478 """
1472 1479
1473 1480 # ui settings
1474 1481 if not tmpl and not style: # templates are stronger than style
1475 1482 tmpl = ui.config('ui', 'logtemplate')
1476 1483 if tmpl:
1477 1484 try:
1478 1485 tmpl = templater.unquotestring(tmpl)
1479 1486 except SyntaxError:
1480 1487 pass
1481 1488 return tmpl, None
1482 1489 else:
1483 1490 style = util.expandpath(ui.config('ui', 'style', ''))
1484 1491
1485 1492 if not tmpl and style:
1486 1493 mapfile = style
1487 1494 if not os.path.split(mapfile)[0]:
1488 1495 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1489 1496 or templater.templatepath(mapfile))
1490 1497 if mapname:
1491 1498 mapfile = mapname
1492 1499 return None, mapfile
1493 1500
1494 1501 if not tmpl:
1495 1502 return None, None
1496 1503
1497 1504 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1498 1505
1499 1506 def show_changeset(ui, repo, opts, buffered=False):
1500 1507 """show one changeset using template or regular display.
1501 1508
1502 1509 Display format will be the first non-empty hit of:
1503 1510 1. option 'template'
1504 1511 2. option 'style'
1505 1512 3. [ui] setting 'logtemplate'
1506 1513 4. [ui] setting 'style'
1507 1514 If all of these values are either unset or the empty string,
1508 1515 regular display via changeset_printer() is done.
1509 1516 """
1510 1517 # options
1511 1518 matchfn = None
1512 1519 if opts.get('patch') or opts.get('stat'):
1513 1520 matchfn = scmutil.matchall(repo)
1514 1521
1515 1522 if opts.get('template') == 'json':
1516 1523 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1517 1524
1518 1525 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1519 1526
1520 1527 if not tmpl and not mapfile:
1521 1528 return changeset_printer(ui, repo, matchfn, opts, buffered)
1522 1529
1523 1530 try:
1524 1531 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1525 1532 buffered)
1526 1533 except SyntaxError as inst:
1527 1534 raise util.Abort(inst.args[0])
1528 1535 return t
1529 1536
1530 1537 def showmarker(ui, marker):
1531 1538 """utility function to display an obsolescence marker in a readable way
1532 1539
1533 1540 To be used by debug functions."""
1534 1541 ui.write(hex(marker.precnode()))
1535 1542 for repl in marker.succnodes():
1536 1543 ui.write(' ')
1537 1544 ui.write(hex(repl))
1538 1545 ui.write(' %X ' % marker.flags())
1539 1546 parents = marker.parentnodes()
1540 1547 if parents is not None:
1541 1548 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1542 1549 ui.write('(%s) ' % util.datestr(marker.date()))
1543 1550 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1544 1551 sorted(marker.metadata().items())
1545 1552 if t[0] != 'date')))
1546 1553 ui.write('\n')
1547 1554
1548 1555 def finddate(ui, repo, date):
1549 1556 """Find the tipmost changeset that matches the given date spec"""
1550 1557
1551 1558 df = util.matchdate(date)
1552 1559 m = scmutil.matchall(repo)
1553 1560 results = {}
1554 1561
1555 1562 def prep(ctx, fns):
1556 1563 d = ctx.date()
1557 1564 if df(d[0]):
1558 1565 results[ctx.rev()] = d
1559 1566
1560 1567 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1561 1568 rev = ctx.rev()
1562 1569 if rev in results:
1563 1570 ui.status(_("found revision %s from %s\n") %
1564 1571 (rev, util.datestr(results[rev])))
1565 1572 return str(rev)
1566 1573
1567 1574 raise util.Abort(_("revision matching date not found"))
1568 1575
1569 1576 def increasingwindows(windowsize=8, sizelimit=512):
1570 1577 while True:
1571 1578 yield windowsize
1572 1579 if windowsize < sizelimit:
1573 1580 windowsize *= 2
1574 1581
1575 1582 class FileWalkError(Exception):
1576 1583 pass
1577 1584
1578 1585 def walkfilerevs(repo, match, follow, revs, fncache):
1579 1586 '''Walks the file history for the matched files.
1580 1587
1581 1588 Returns the changeset revs that are involved in the file history.
1582 1589
1583 1590 Throws FileWalkError if the file history can't be walked using
1584 1591 filelogs alone.
1585 1592 '''
1586 1593 wanted = set()
1587 1594 copies = []
1588 1595 minrev, maxrev = min(revs), max(revs)
1589 1596 def filerevgen(filelog, last):
1590 1597 """
1591 1598 Only files, no patterns. Check the history of each file.
1592 1599
1593 1600 Examines filelog entries within the minrev..maxrev linkrev range.
1594 1601 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1595 1602 tuples in backwards order
1596 1603 """
1597 1604 cl_count = len(repo)
1598 1605 revs = []
1599 1606 for j in xrange(0, last + 1):
1600 1607 linkrev = filelog.linkrev(j)
1601 1608 if linkrev < minrev:
1602 1609 continue
1603 1610 # only yield rev for which we have the changelog, it can
1604 1611 # happen while doing "hg log" during a pull or commit
1605 1612 if linkrev >= cl_count:
1606 1613 break
1607 1614
1608 1615 parentlinkrevs = []
1609 1616 for p in filelog.parentrevs(j):
1610 1617 if p != nullrev:
1611 1618 parentlinkrevs.append(filelog.linkrev(p))
1612 1619 n = filelog.node(j)
1613 1620 revs.append((linkrev, parentlinkrevs,
1614 1621 follow and filelog.renamed(n)))
1615 1622
1616 1623 return reversed(revs)
1617 1624 def iterfiles():
1618 1625 pctx = repo['.']
1619 1626 for filename in match.files():
1620 1627 if follow:
1621 1628 if filename not in pctx:
1622 1629 raise util.Abort(_('cannot follow file not in parent '
1623 1630 'revision: "%s"') % filename)
1624 1631 yield filename, pctx[filename].filenode()
1625 1632 else:
1626 1633 yield filename, None
1627 1634 for filename_node in copies:
1628 1635 yield filename_node
1629 1636
1630 1637 for file_, node in iterfiles():
1631 1638 filelog = repo.file(file_)
1632 1639 if not len(filelog):
1633 1640 if node is None:
1634 1641 # A zero count may be a directory or deleted file, so
1635 1642 # try to find matching entries on the slow path.
1636 1643 if follow:
1637 1644 raise util.Abort(
1638 1645 _('cannot follow nonexistent file: "%s"') % file_)
1639 1646 raise FileWalkError("Cannot walk via filelog")
1640 1647 else:
1641 1648 continue
1642 1649
1643 1650 if node is None:
1644 1651 last = len(filelog) - 1
1645 1652 else:
1646 1653 last = filelog.rev(node)
1647 1654
1648 1655 # keep track of all ancestors of the file
1649 1656 ancestors = set([filelog.linkrev(last)])
1650 1657
1651 1658 # iterate from latest to oldest revision
1652 1659 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1653 1660 if not follow:
1654 1661 if rev > maxrev:
1655 1662 continue
1656 1663 else:
1657 1664 # Note that last might not be the first interesting
1658 1665 # rev to us:
1659 1666 # if the file has been changed after maxrev, we'll
1660 1667 # have linkrev(last) > maxrev, and we still need
1661 1668 # to explore the file graph
1662 1669 if rev not in ancestors:
1663 1670 continue
1664 1671 # XXX insert 1327 fix here
1665 1672 if flparentlinkrevs:
1666 1673 ancestors.update(flparentlinkrevs)
1667 1674
1668 1675 fncache.setdefault(rev, []).append(file_)
1669 1676 wanted.add(rev)
1670 1677 if copied:
1671 1678 copies.append(copied)
1672 1679
1673 1680 return wanted
1674 1681
1675 1682 class _followfilter(object):
1676 1683 def __init__(self, repo, onlyfirst=False):
1677 1684 self.repo = repo
1678 1685 self.startrev = nullrev
1679 1686 self.roots = set()
1680 1687 self.onlyfirst = onlyfirst
1681 1688
1682 1689 def match(self, rev):
1683 1690 def realparents(rev):
1684 1691 if self.onlyfirst:
1685 1692 return self.repo.changelog.parentrevs(rev)[0:1]
1686 1693 else:
1687 1694 return filter(lambda x: x != nullrev,
1688 1695 self.repo.changelog.parentrevs(rev))
1689 1696
1690 1697 if self.startrev == nullrev:
1691 1698 self.startrev = rev
1692 1699 return True
1693 1700
1694 1701 if rev > self.startrev:
1695 1702 # forward: all descendants
1696 1703 if not self.roots:
1697 1704 self.roots.add(self.startrev)
1698 1705 for parent in realparents(rev):
1699 1706 if parent in self.roots:
1700 1707 self.roots.add(rev)
1701 1708 return True
1702 1709 else:
1703 1710 # backwards: all parents
1704 1711 if not self.roots:
1705 1712 self.roots.update(realparents(self.startrev))
1706 1713 if rev in self.roots:
1707 1714 self.roots.remove(rev)
1708 1715 self.roots.update(realparents(rev))
1709 1716 return True
1710 1717
1711 1718 return False
1712 1719
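# A minimal sketch of how _followfilter is typically driven (hypothetical
# helper, assuming `repo` is an open localrepository): the first match() call
# primes the filter with the starting revision, and later calls report whether
# each revision is connected to it -- descendants when walking forward,
# ancestors when walking backward, as in the prune handling below.
def _descendantsof(repo, startrev, lastrev):
    ff = _followfilter(repo)
    ff.match(startrev)  # prime the filter; the first call always matches
    return [r for r in xrange(startrev + 1, lastrev + 1) if ff.match(r)]
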
1713 1720 def walkchangerevs(repo, match, opts, prepare):
1714 1721 '''Iterate over files and the revs in which they changed.
1715 1722
1716 1723 Callers most commonly need to iterate backwards over the history
1717 1724 in which they are interested. Doing so has awful (quadratic-looking)
1718 1725 performance, so we use iterators in a "windowed" way.
1719 1726
1720 1727 We walk a window of revisions in the desired order. Within the
1721 1728 window, we first walk forwards to gather data, then in the desired
1722 1729 order (usually backwards) to display it.
1723 1730
1724 1731 This function returns an iterator yielding contexts. Before
1725 1732 yielding each context, the iterator will first call the prepare
1726 1733 function on each context in the window in forward order.'''
1727 1734
1728 1735 follow = opts.get('follow') or opts.get('follow_first')
1729 1736 revs = _logrevs(repo, opts)
1730 1737 if not revs:
1731 1738 return []
1732 1739 wanted = set()
1733 1740 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1734 1741 opts.get('removed'))
1735 1742 fncache = {}
1736 1743 change = repo.changectx
1737 1744
1738 1745 # First step is to fill wanted, the set of revisions that we want to yield.
1739 1746 # When it does not induce extra cost, we also fill fncache for revisions in
1740 1747 # wanted: a cache of filenames that were changed (ctx.files()) and that
1741 1748 # match the file filtering conditions.
1742 1749
1743 1750 if match.always():
1744 1751 # No files, no patterns. Display all revs.
1745 1752 wanted = revs
1746 1753 elif not slowpath:
1747 1754 # We only have to read through the filelog to find wanted revisions
1748 1755
1749 1756 try:
1750 1757 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1751 1758 except FileWalkError:
1752 1759 slowpath = True
1753 1760
1754 1761 # We decided to fall back to the slowpath because at least one
1755 1762 # of the paths was not a file. Check to see if at least one of them
1756 1763 # existed in history, otherwise simply return
1757 1764 for path in match.files():
1758 1765 if path == '.' or path in repo.store:
1759 1766 break
1760 1767 else:
1761 1768 return []
1762 1769
1763 1770 if slowpath:
1764 1771 # We have to read the changelog to match filenames against
1765 1772 # changed files
1766 1773
1767 1774 if follow:
1768 1775 raise util.Abort(_('can only follow copies/renames for explicit '
1769 1776 'filenames'))
1770 1777
1771 1778 # The slow path checks files modified in every changeset.
1772 1779 # This is really slow on large repos, so compute the set lazily.
1773 1780 class lazywantedset(object):
1774 1781 def __init__(self):
1775 1782 self.set = set()
1776 1783 self.revs = set(revs)
1777 1784
1778 1785 # No need to worry about locality here because it will be accessed
1779 1786 # in the same order as the increasing window below.
1780 1787 def __contains__(self, value):
1781 1788 if value in self.set:
1782 1789 return True
1783 1790 elif value not in self.revs:
1784 1791 return False
1785 1792 else:
1786 1793 self.revs.discard(value)
1787 1794 ctx = change(value)
1788 1795 matches = filter(match, ctx.files())
1789 1796 if matches:
1790 1797 fncache[value] = matches
1791 1798 self.set.add(value)
1792 1799 return True
1793 1800 return False
1794 1801
1795 1802 def discard(self, value):
1796 1803 self.revs.discard(value)
1797 1804 self.set.discard(value)
1798 1805
1799 1806 wanted = lazywantedset()
1800 1807
1801 1808 # it might be worthwhile to do this in the iterator if the rev range
1802 1809 # is descending and the prune args are all within that range
1803 1810 for rev in opts.get('prune', ()):
1804 1811 rev = repo[rev].rev()
1805 1812 ff = _followfilter(repo)
1806 1813 stop = min(revs[0], revs[-1])
1807 1814 for x in xrange(rev, stop - 1, -1):
1808 1815 if ff.match(x):
1809 1816 wanted = wanted - [x]
1810 1817
1811 1818 # Now that wanted is correctly initialized, we can iterate over the
1812 1819 # revision range, yielding only revisions in wanted.
1813 1820 def iterate():
1814 1821 if follow and match.always():
1815 1822 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1816 1823 def want(rev):
1817 1824 return ff.match(rev) and rev in wanted
1818 1825 else:
1819 1826 def want(rev):
1820 1827 return rev in wanted
1821 1828
1822 1829 it = iter(revs)
1823 1830 stopiteration = False
1824 1831 for windowsize in increasingwindows():
1825 1832 nrevs = []
1826 1833 for i in xrange(windowsize):
1827 1834 rev = next(it, None)
1828 1835 if rev is None:
1829 1836 stopiteration = True
1830 1837 break
1831 1838 elif want(rev):
1832 1839 nrevs.append(rev)
1833 1840 for rev in sorted(nrevs):
1834 1841 fns = fncache.get(rev)
1835 1842 ctx = change(rev)
1836 1843 if not fns:
1837 1844 def fns_generator():
1838 1845 for f in ctx.files():
1839 1846 if match(f):
1840 1847 yield f
1841 1848 fns = fns_generator()
1842 1849 prepare(ctx, fns)
1843 1850 for rev in nrevs:
1844 1851 yield change(rev)
1845 1852
1846 1853 if stopiteration:
1847 1854 break
1848 1855
1849 1856 return iterate()
1850 1857
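# A minimal sketch of the caller pattern walkchangerevs() expects (hypothetical
# helper, assuming `repo` is open): the prepare callback sees each context
# together with the files that matched, in forward order within a window, and
# the returned iterator then yields the contexts themselves.
def _revstouchedby(repo, username):
    m = scmutil.matchall(repo)
    hits = []
    def prep(ctx, fns):
        if username in ctx.user():
            hits.append(ctx.rev())
    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        pass  # all the work happens in prep(); contexts could be shown here
    return sorted(hits)
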
1851 1858 def _makefollowlogfilematcher(repo, files, followfirst):
1852 1859 # When displaying a revision with --patch --follow FILE, we have
1853 1860 # to know which file of the revision must be diffed. With
1854 1861 # --follow, we want the names of the ancestors of FILE in the
1855 1862 # revision, stored in "fcache". "fcache" is populated by
1856 1863 # reproducing the graph traversal already done by --follow revset
1857 1864 # and relating linkrevs to file names (which is not "correct" but
1858 1865 # good enough).
1859 1866 fcache = {}
1860 1867 fcacheready = [False]
1861 1868 pctx = repo['.']
1862 1869
1863 1870 def populate():
1864 1871 for fn in files:
1865 1872 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1866 1873 for c in i:
1867 1874 fcache.setdefault(c.linkrev(), set()).add(c.path())
1868 1875
1869 1876 def filematcher(rev):
1870 1877 if not fcacheready[0]:
1871 1878 # Lazy initialization
1872 1879 fcacheready[0] = True
1873 1880 populate()
1874 1881 return scmutil.matchfiles(repo, fcache.get(rev, []))
1875 1882
1876 1883 return filematcher
1877 1884
1878 1885 def _makenofollowlogfilematcher(repo, pats, opts):
1879 1886 '''hook for extensions to override the filematcher for non-follow cases'''
1880 1887 return None
1881 1888
1882 1889 def _makelogrevset(repo, pats, opts, revs):
1883 1890 """Return (expr, filematcher) where expr is a revset string built
1884 1891 from log options and file patterns or None. If --stat or --patch
1885 1892 are not passed filematcher is None. Otherwise it is a callable
1886 1893 taking a revision number and returning a match object filtering
1887 1894 the files to be detailed when displaying the revision.
1888 1895 """
1889 1896 opt2revset = {
1890 1897 'no_merges': ('not merge()', None),
1891 1898 'only_merges': ('merge()', None),
1892 1899 '_ancestors': ('ancestors(%(val)s)', None),
1893 1900 '_fancestors': ('_firstancestors(%(val)s)', None),
1894 1901 '_descendants': ('descendants(%(val)s)', None),
1895 1902 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1896 1903 '_matchfiles': ('_matchfiles(%(val)s)', None),
1897 1904 'date': ('date(%(val)r)', None),
1898 1905 'branch': ('branch(%(val)r)', ' or '),
1899 1906 '_patslog': ('filelog(%(val)r)', ' or '),
1900 1907 '_patsfollow': ('follow(%(val)r)', ' or '),
1901 1908 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1902 1909 'keyword': ('keyword(%(val)r)', ' or '),
1903 1910 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1904 1911 'user': ('user(%(val)r)', ' or '),
1905 1912 }
1906 1913
1907 1914 opts = dict(opts)
1908 1915 # follow or not follow?
1909 1916 follow = opts.get('follow') or opts.get('follow_first')
1910 1917 if opts.get('follow_first'):
1911 1918 followfirst = 1
1912 1919 else:
1913 1920 followfirst = 0
1914 1921 # --follow with FILE behaviour depends on revs...
1915 1922 it = iter(revs)
1916 1923 startrev = it.next()
1917 1924 followdescendants = startrev < next(it, startrev)
1918 1925
1919 1926 # branch and only_branch are really aliases and must be handled at
1920 1927 # the same time
1921 1928 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1922 1929 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1923 1930 # pats/include/exclude are passed to match.match() directly in
1924 1931 # _matchfiles() revset but walkchangerevs() builds its matcher with
1925 1932 # scmutil.match(). The difference is that input pats are globbed on
1926 1933 # platforms without shell expansion (windows).
1927 1934 wctx = repo[None]
1928 1935 match, pats = scmutil.matchandpats(wctx, pats, opts)
1929 1936 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1930 1937 opts.get('removed'))
1931 1938 if not slowpath:
1932 1939 for f in match.files():
1933 1940 if follow and f not in wctx:
1934 1941 # If the file exists, it may be a directory, so let it
1935 1942 # take the slow path.
1936 1943 if os.path.exists(repo.wjoin(f)):
1937 1944 slowpath = True
1938 1945 continue
1939 1946 else:
1940 1947 raise util.Abort(_('cannot follow file not in parent '
1941 1948 'revision: "%s"') % f)
1942 1949 filelog = repo.file(f)
1943 1950 if not filelog:
1944 1951 # A zero count may be a directory or deleted file, so
1945 1952 # try to find matching entries on the slow path.
1946 1953 if follow:
1947 1954 raise util.Abort(
1948 1955 _('cannot follow nonexistent file: "%s"') % f)
1949 1956 slowpath = True
1950 1957
1951 1958 # We decided to fall back to the slowpath because at least one
1952 1959 # of the paths was not a file. Check to see if at least one of them
1953 1960 # existed in history - in that case, we'll continue down the
1954 1961 # slowpath; otherwise, we can turn off the slowpath
1955 1962 if slowpath:
1956 1963 for path in match.files():
1957 1964 if path == '.' or path in repo.store:
1958 1965 break
1959 1966 else:
1960 1967 slowpath = False
1961 1968
1962 1969 fpats = ('_patsfollow', '_patsfollowfirst')
1963 1970 fnopats = (('_ancestors', '_fancestors'),
1964 1971 ('_descendants', '_fdescendants'))
1965 1972 if slowpath:
1966 1973 # See walkchangerevs() slow path.
1967 1974 #
1968 1975 # pats/include/exclude cannot be represented as separate
1969 1976 # revset expressions as their filtering logic applies at file
1970 1977 # level. For instance "-I a -X b" matches a revision touching
1971 1978 # "a" and "b" while "file(a) and not file(b)" does
1972 1979 # not. Besides, filesets are evaluated against the working
1973 1980 # directory.
1974 1981 matchargs = ['r:', 'd:relpath']
1975 1982 for p in pats:
1976 1983 matchargs.append('p:' + p)
1977 1984 for p in opts.get('include', []):
1978 1985 matchargs.append('i:' + p)
1979 1986 for p in opts.get('exclude', []):
1980 1987 matchargs.append('x:' + p)
1981 1988 matchargs = ','.join(('%r' % p) for p in matchargs)
1982 1989 opts['_matchfiles'] = matchargs
1983 1990 if follow:
1984 1991 opts[fnopats[0][followfirst]] = '.'
1985 1992 else:
1986 1993 if follow:
1987 1994 if pats:
1988 1995 # follow() revset interprets its file argument as a
1989 1996 # manifest entry, so use match.files(), not pats.
1990 1997 opts[fpats[followfirst]] = list(match.files())
1991 1998 else:
1992 1999 op = fnopats[followdescendants][followfirst]
1993 2000 opts[op] = 'rev(%d)' % startrev
1994 2001 else:
1995 2002 opts['_patslog'] = list(pats)
1996 2003
1997 2004 filematcher = None
1998 2005 if opts.get('patch') or opts.get('stat'):
1999 2006 # When following files, track renames via a special matcher.
2000 2007 # If we're forced to take the slowpath it means we're following
2001 2008 # at least one pattern/directory, so don't bother with rename tracking.
2002 2009 if follow and not match.always() and not slowpath:
2003 2010 # _makefollowlogfilematcher expects its files argument to be
2004 2011 # relative to the repo root, so use match.files(), not pats.
2005 2012 filematcher = _makefollowlogfilematcher(repo, match.files(),
2006 2013 followfirst)
2007 2014 else:
2008 2015 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2009 2016 if filematcher is None:
2010 2017 filematcher = lambda rev: match
2011 2018
2012 2019 expr = []
2013 2020 for op, val in sorted(opts.iteritems()):
2014 2021 if not val:
2015 2022 continue
2016 2023 if op not in opt2revset:
2017 2024 continue
2018 2025 revop, andor = opt2revset[op]
2019 2026 if '%(val)' not in revop:
2020 2027 expr.append(revop)
2021 2028 else:
2022 2029 if not isinstance(val, list):
2023 2030 e = revop % {'val': val}
2024 2031 else:
2025 2032 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2026 2033 expr.append(e)
2027 2034
2028 2035 if expr:
2029 2036 expr = '(' + ' and '.join(expr) + ')'
2030 2037 else:
2031 2038 expr = None
2032 2039 return expr, filematcher
2033 2040
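# A minimal sketch of the kind of revset string assembled above from the
# opt2revset table (hypothetical values, assuming a non-empty repository):
# single-valued options are joined with "and", while list-valued options are
# first grouped with their own connective from the table.
def _examplelogexpr(repo):
    opts = {'user': ['alice', 'bob'], 'no_merges': True}
    expr, filematcher = _makelogrevset(repo, [], opts, repo.revs(':'))
    # with the code above, expr works out to:
    # "(not merge() and (user('alice') or user('bob')))"
    return expr
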
2034 2041 def _logrevs(repo, opts):
2035 2042 # Default --rev value depends on --follow but --follow behaviour
2036 2043 # depends on revisions resolved from --rev...
2037 2044 follow = opts.get('follow') or opts.get('follow_first')
2038 2045 if opts.get('rev'):
2039 2046 revs = scmutil.revrange(repo, opts['rev'])
2040 2047 elif follow and repo.dirstate.p1() == nullid:
2041 2048 revs = revset.baseset()
2042 2049 elif follow:
2043 2050 revs = repo.revs('reverse(:.)')
2044 2051 else:
2045 2052 revs = revset.spanset(repo)
2046 2053 revs.reverse()
2047 2054 return revs
2048 2055
2049 2056 def getgraphlogrevs(repo, pats, opts):
2050 2057 """Return (revs, expr, filematcher) where revs is an iterable of
2051 2058 revision numbers, expr is a revset string built from log options
2052 2059 and file patterns or None, and used to filter 'revs'. If --stat or
2053 2060 --patch are not passed filematcher is None. Otherwise it is a
2054 2061 callable taking a revision number and returning a match object
2055 2062 filtering the files to be detailed when displaying the revision.
2056 2063 """
2057 2064 limit = loglimit(opts)
2058 2065 revs = _logrevs(repo, opts)
2059 2066 if not revs:
2060 2067 return revset.baseset(), None, None
2061 2068 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2062 2069 if opts.get('rev'):
2063 2070 # User-specified revs might be unsorted, but don't sort before
2064 2071 # _makelogrevset because it might depend on the order of revs
2065 2072 revs.sort(reverse=True)
2066 2073 if expr:
2067 2074 # Revset matchers often operate faster on revisions in changelog
2068 2075 # order, because most filters deal with the changelog.
2069 2076 revs.reverse()
2070 2077 matcher = revset.match(repo.ui, expr)
2071 2078 # Revset matches can reorder revisions. "A or B" typically returns
2072 2079 # the revision matching A and then the revision matching B. Sort
2073 2080 # again to fix that.
2074 2081 revs = matcher(repo, revs)
2075 2082 revs.sort(reverse=True)
2076 2083 if limit is not None:
2077 2084 limitedrevs = []
2078 2085 for idx, rev in enumerate(revs):
2079 2086 if idx >= limit:
2080 2087 break
2081 2088 limitedrevs.append(rev)
2082 2089 revs = revset.baseset(limitedrevs)
2083 2090
2084 2091 return revs, expr, filematcher
2085 2092
2086 2093 def getlogrevs(repo, pats, opts):
2087 2094 """Return (revs, expr, filematcher) where revs is an iterable of
2088 2095 revision numbers, expr is a revset string built from log options
2089 2096 and file patterns or None, and used to filter 'revs'. If --stat or
2090 2097 --patch are not passed filematcher is None. Otherwise it is a
2091 2098 callable taking a revision number and returning a match object
2092 2099 filtering the files to be detailed when displaying the revision.
2093 2100 """
2094 2101 limit = loglimit(opts)
2095 2102 revs = _logrevs(repo, opts)
2096 2103 if not revs:
2097 2104 return revset.baseset([]), None, None
2098 2105 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2099 2106 if expr:
2100 2107 # Revset matchers often operate faster on revisions in changelog
2101 2108 # order, because most filters deal with the changelog.
2102 2109 if not opts.get('rev'):
2103 2110 revs.reverse()
2104 2111 matcher = revset.match(repo.ui, expr)
2105 2112 # Revset matches can reorder revisions. "A or B" typically returns
2106 2113 # the revision matching A and then the revision matching B. Sort
2107 2114 # again to fix that.
2108 2115 revs = matcher(repo, revs)
2109 2116 if not opts.get('rev'):
2110 2117 revs.sort(reverse=True)
2111 2118 if limit is not None:
2112 2119 limitedrevs = []
2113 2120 for idx, r in enumerate(revs):
2114 2121 if limit <= idx:
2115 2122 break
2116 2123 limitedrevs.append(r)
2117 2124 revs = revset.baseset(limitedrevs)
2118 2125
2119 2126 return revs, expr, filematcher
2120 2127
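# A minimal sketch of the plain (non-graph) log pipeline built from the
# helpers above, mirroring the graph variant in graphlog() further down
# (hypothetical function, assuming `ui`, `repo` and log-style `opts`):
def _simplelog(ui, repo, pats, opts):
    revs, expr, filematcher = getlogrevs(repo, pats, opts)
    displayer = show_changeset(ui, repo, opts, buffered=False)
    for rev in revs:
        ctx = repo[rev]
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(rev)
        displayer.show(ctx, matchfn=revmatchfn)
    displayer.close()
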
2121 2128 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
2122 2129 filematcher=None):
2123 2130 seen, state = [], graphmod.asciistate()
2124 2131 for rev, type, ctx, parents in dag:
2125 2132 char = 'o'
2126 2133 if ctx.node() in showparents:
2127 2134 char = '@'
2128 2135 elif ctx.obsolete():
2129 2136 char = 'x'
2130 2137 elif ctx.closesbranch():
2131 2138 char = '_'
2132 2139 copies = None
2133 2140 if getrenamed and ctx.rev():
2134 2141 copies = []
2135 2142 for fn in ctx.files():
2136 2143 rename = getrenamed(fn, ctx.rev())
2137 2144 if rename:
2138 2145 copies.append((fn, rename[0]))
2139 2146 revmatchfn = None
2140 2147 if filematcher is not None:
2141 2148 revmatchfn = filematcher(ctx.rev())
2142 2149 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2143 2150 lines = displayer.hunk.pop(rev).split('\n')
2144 2151 if not lines[-1]:
2145 2152 del lines[-1]
2146 2153 displayer.flush(rev)
2147 2154 edges = edgefn(type, char, lines, seen, rev, parents)
2148 2155 for type, char, lines, coldata in edges:
2149 2156 graphmod.ascii(ui, state, type, char, lines, coldata)
2150 2157 displayer.close()
2151 2158
2152 2159 def graphlog(ui, repo, *pats, **opts):
2153 2160 # Parameters are identical to log command ones
2154 2161 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2155 2162 revdag = graphmod.dagwalker(repo, revs)
2156 2163
2157 2164 getrenamed = None
2158 2165 if opts.get('copies'):
2159 2166 endrev = None
2160 2167 if opts.get('rev'):
2161 2168 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2162 2169 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2163 2170 displayer = show_changeset(ui, repo, opts, buffered=True)
2164 2171 showparents = [ctx.node() for ctx in repo[None].parents()]
2165 2172 displaygraph(ui, revdag, displayer, showparents,
2166 2173 graphmod.asciiedges, getrenamed, filematcher)
2167 2174
2168 2175 def checkunsupportedgraphflags(pats, opts):
2169 2176 for op in ["newest_first"]:
2170 2177 if op in opts and opts[op]:
2171 2178 raise util.Abort(_("-G/--graph option is incompatible with --%s")
2172 2179 % op.replace("_", "-"))
2173 2180
2174 2181 def graphrevs(repo, nodes, opts):
2175 2182 limit = loglimit(opts)
2176 2183 nodes.reverse()
2177 2184 if limit is not None:
2178 2185 nodes = nodes[:limit]
2179 2186 return graphmod.nodes(repo, nodes)
2180 2187
2181 2188 def add(ui, repo, match, prefix, explicitonly, **opts):
2182 2189 join = lambda f: os.path.join(prefix, f)
2183 2190 bad = []
2184 2191
2185 2192 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2186 2193 names = []
2187 2194 wctx = repo[None]
2188 2195 cca = None
2189 2196 abort, warn = scmutil.checkportabilityalert(ui)
2190 2197 if abort or warn:
2191 2198 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2192 2199
2193 2200 for f in wctx.walk(matchmod.badmatch(match, badfn)):
2194 2201 exact = match.exact(f)
2195 2202 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2196 2203 if cca:
2197 2204 cca(f)
2198 2205 names.append(f)
2199 2206 if ui.verbose or not exact:
2200 2207 ui.status(_('adding %s\n') % match.rel(f))
2201 2208
2202 2209 for subpath in sorted(wctx.substate):
2203 2210 sub = wctx.sub(subpath)
2204 2211 try:
2205 2212 submatch = matchmod.narrowmatcher(subpath, match)
2206 2213 if opts.get('subrepos'):
2207 2214 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2208 2215 else:
2209 2216 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2210 2217 except error.LookupError:
2211 2218 ui.status(_("skipping missing subrepository: %s\n")
2212 2219 % join(subpath))
2213 2220
2214 2221 if not opts.get('dry_run'):
2215 2222 rejected = wctx.add(names, prefix)
2216 2223 bad.extend(f for f in rejected if f in match.files())
2217 2224 return bad
2218 2225
2219 2226 def forget(ui, repo, match, prefix, explicitonly):
2220 2227 join = lambda f: os.path.join(prefix, f)
2221 2228 bad = []
2222 2229 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2223 2230 wctx = repo[None]
2224 2231 forgot = []
2225 2232
2226 2233 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2227 2234 forget = sorted(s[0] + s[1] + s[3] + s[6])
2228 2235 if explicitonly:
2229 2236 forget = [f for f in forget if match.exact(f)]
2230 2237
2231 2238 for subpath in sorted(wctx.substate):
2232 2239 sub = wctx.sub(subpath)
2233 2240 try:
2234 2241 submatch = matchmod.narrowmatcher(subpath, match)
2235 2242 subbad, subforgot = sub.forget(submatch, prefix)
2236 2243 bad.extend([subpath + '/' + f for f in subbad])
2237 2244 forgot.extend([subpath + '/' + f for f in subforgot])
2238 2245 except error.LookupError:
2239 2246 ui.status(_("skipping missing subrepository: %s\n")
2240 2247 % join(subpath))
2241 2248
2242 2249 if not explicitonly:
2243 2250 for f in match.files():
2244 2251 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2245 2252 if f not in forgot:
2246 2253 if repo.wvfs.exists(f):
2247 2254 # Don't complain if the exact case match wasn't given.
2248 2255 # But don't do this until after checking 'forgot', so
2249 2256 # that subrepo files aren't normalized, and this op is
2250 2257 # purely from data cached by the status walk above.
2251 2258 if repo.dirstate.normalize(f) in repo.dirstate:
2252 2259 continue
2253 2260 ui.warn(_('not removing %s: '
2254 2261 'file is already untracked\n')
2255 2262 % match.rel(f))
2256 2263 bad.append(f)
2257 2264
2258 2265 for f in forget:
2259 2266 if ui.verbose or not match.exact(f):
2260 2267 ui.status(_('removing %s\n') % match.rel(f))
2261 2268
2262 2269 rejected = wctx.forget(forget, prefix)
2263 2270 bad.extend(f for f in rejected if f in match.files())
2264 2271 forgot.extend(f for f in forget if f not in rejected)
2265 2272 return bad, forgot
2266 2273
2267 2274 def files(ui, ctx, m, fm, fmt, subrepos):
2268 2275 rev = ctx.rev()
2269 2276 ret = 1
2270 2277 ds = ctx.repo().dirstate
2271 2278
2272 2279 for f in ctx.matches(m):
2273 2280 if rev is None and ds[f] == 'r':
2274 2281 continue
2275 2282 fm.startitem()
2276 2283 if ui.verbose:
2277 2284 fc = ctx[f]
2278 2285 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2279 2286 fm.data(abspath=f)
2280 2287 fm.write('path', fmt, m.rel(f))
2281 2288 ret = 0
2282 2289
2283 2290 for subpath in sorted(ctx.substate):
2284 2291 def matchessubrepo(subpath):
2285 2292 return (m.always() or m.exact(subpath)
2286 2293 or any(f.startswith(subpath + '/') for f in m.files()))
2287 2294
2288 2295 if subrepos or matchessubrepo(subpath):
2289 2296 sub = ctx.sub(subpath)
2290 2297 try:
2291 2298 submatch = matchmod.narrowmatcher(subpath, m)
2292 2299 if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
2293 2300 ret = 0
2294 2301 except error.LookupError:
2295 2302 ui.status(_("skipping missing subrepository: %s\n")
2296 2303 % m.abs(subpath))
2297 2304
2298 2305 return ret
2299 2306
2300 2307 def remove(ui, repo, m, prefix, after, force, subrepos):
2301 2308 join = lambda f: os.path.join(prefix, f)
2302 2309 ret = 0
2303 2310 s = repo.status(match=m, clean=True)
2304 2311 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2305 2312
2306 2313 wctx = repo[None]
2307 2314
2308 2315 for subpath in sorted(wctx.substate):
2309 2316 def matchessubrepo(matcher, subpath):
2310 2317 if matcher.exact(subpath):
2311 2318 return True
2312 2319 for f in matcher.files():
2313 2320 if f.startswith(subpath):
2314 2321 return True
2315 2322 return False
2316 2323
2317 2324 if subrepos or matchessubrepo(m, subpath):
2318 2325 sub = wctx.sub(subpath)
2319 2326 try:
2320 2327 submatch = matchmod.narrowmatcher(subpath, m)
2321 2328 if sub.removefiles(submatch, prefix, after, force, subrepos):
2322 2329 ret = 1
2323 2330 except error.LookupError:
2324 2331 ui.status(_("skipping missing subrepository: %s\n")
2325 2332 % join(subpath))
2326 2333
2327 2334 # warn about failure to delete explicit files/dirs
2328 2335 deleteddirs = util.dirs(deleted)
2329 2336 for f in m.files():
2330 2337 def insubrepo():
2331 2338 for subpath in wctx.substate:
2332 2339 if f.startswith(subpath):
2333 2340 return True
2334 2341 return False
2335 2342
2336 2343 isdir = f in deleteddirs or wctx.hasdir(f)
2337 2344 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2338 2345 continue
2339 2346
2340 2347 if repo.wvfs.exists(f):
2341 2348 if repo.wvfs.isdir(f):
2342 2349 ui.warn(_('not removing %s: no tracked files\n')
2343 2350 % m.rel(f))
2344 2351 else:
2345 2352 ui.warn(_('not removing %s: file is untracked\n')
2346 2353 % m.rel(f))
2347 2354 # missing files will generate a warning elsewhere
2348 2355 ret = 1
2349 2356
2350 2357 if force:
2351 2358 list = modified + deleted + clean + added
2352 2359 elif after:
2353 2360 list = deleted
2354 2361 for f in modified + added + clean:
2355 2362 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2356 2363 ret = 1
2357 2364 else:
2358 2365 list = deleted + clean
2359 2366 for f in modified:
2360 2367 ui.warn(_('not removing %s: file is modified (use -f'
2361 2368 ' to force removal)\n') % m.rel(f))
2362 2369 ret = 1
2363 2370 for f in added:
2364 2371 ui.warn(_('not removing %s: file has been marked for add'
2365 2372 ' (use forget to undo)\n') % m.rel(f))
2366 2373 ret = 1
2367 2374
2368 2375 for f in sorted(list):
2369 2376 if ui.verbose or not m.exact(f):
2370 2377 ui.status(_('removing %s\n') % m.rel(f))
2371 2378
2372 2379 wlock = repo.wlock()
2373 2380 try:
2374 2381 if not after:
2375 2382 for f in list:
2376 2383 if f in added:
2377 2384 continue # we never unlink added files on remove
2378 2385 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2379 2386 repo[None].forget(list)
2380 2387 finally:
2381 2388 wlock.release()
2382 2389
2383 2390 return ret
2384 2391
2385 2392 def cat(ui, repo, ctx, matcher, prefix, **opts):
2386 2393 err = 1
2387 2394
2388 2395 def write(path):
2389 2396 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2390 2397 pathname=os.path.join(prefix, path))
2391 2398 data = ctx[path].data()
2392 2399 if opts.get('decode'):
2393 2400 data = repo.wwritedata(path, data)
2394 2401 fp.write(data)
2395 2402 fp.close()
2396 2403
2397 2404 # Automation often uses hg cat on single files, so special case it
2398 2405 # for performance to avoid the cost of parsing the manifest.
2399 2406 if len(matcher.files()) == 1 and not matcher.anypats():
2400 2407 file = matcher.files()[0]
2401 2408 mf = repo.manifest
2402 2409 mfnode = ctx.manifestnode()
2403 2410 if mfnode and mf.find(mfnode, file)[0]:
2404 2411 write(file)
2405 2412 return 0
2406 2413
2407 2414 # Don't warn about "missing" files that are really in subrepos
2408 2415 def badfn(path, msg):
2409 2416 for subpath in ctx.substate:
2410 2417 if path.startswith(subpath):
2411 2418 return
2412 2419 matcher.bad(path, msg)
2413 2420
2414 2421 for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
2415 2422 write(abs)
2416 2423 err = 0
2417 2424
2418 2425 for subpath in sorted(ctx.substate):
2419 2426 sub = ctx.sub(subpath)
2420 2427 try:
2421 2428 submatch = matchmod.narrowmatcher(subpath, matcher)
2422 2429
2423 2430 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2424 2431 **opts):
2425 2432 err = 0
2426 2433 except error.RepoLookupError:
2427 2434 ui.status(_("skipping missing subrepository: %s\n")
2428 2435 % os.path.join(prefix, subpath))
2429 2436
2430 2437 return err
2431 2438
2432 2439 def commit(ui, repo, commitfunc, pats, opts):
2433 2440 '''commit the specified files or all outstanding changes'''
2434 2441 date = opts.get('date')
2435 2442 if date:
2436 2443 opts['date'] = util.parsedate(date)
2437 2444 message = logmessage(ui, opts)
2438 2445 matcher = scmutil.match(repo[None], pats, opts)
2439 2446
2440 2447 # extract addremove carefully -- this function can be called from a command
2441 2448 # that doesn't support addremove
2442 2449 if opts.get('addremove'):
2443 2450 if scmutil.addremove(repo, matcher, "", opts) != 0:
2444 2451 raise util.Abort(
2445 2452 _("failed to mark all new/missing files as added/removed"))
2446 2453
2447 2454 return commitfunc(ui, repo, message, matcher, opts)
2448 2455
2449 2456 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2450 2457 # amend will reuse the existing user if not specified, but the obsolete
2451 2458 # marker creation requires that the current user's name is specified.
2452 2459 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2453 2460 ui.username() # raise exception if username not set
2454 2461
2455 2462 ui.note(_('amending changeset %s\n') % old)
2456 2463 base = old.p1()
2457 2464 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2458 2465
2459 2466 wlock = dsguard = lock = newid = None
2460 2467 try:
2461 2468 wlock = repo.wlock()
2462 2469 dsguard = dirstateguard(repo, 'amend')
2463 2470 lock = repo.lock()
2464 2471 tr = repo.transaction('amend')
2465 2472 try:
2466 2473 # See if we got a message from -m or -l; if not, open the editor
2467 2474 # with the message of the changeset to amend
2468 2475 message = logmessage(ui, opts)
2469 2476 # ensure logfile does not conflict with later enforcement of the
2470 2477 # message. potential logfile content has been processed by
2471 2478 # `logmessage` anyway.
2472 2479 opts.pop('logfile')
2473 2480 # First, do a regular commit to record all changes in the working
2474 2481 # directory (if there are any)
2475 2482 ui.callhooks = False
2476 2483 activebookmark = repo._activebookmark
2477 2484 try:
2478 2485 repo._activebookmark = None
2479 2486 opts['message'] = 'temporary amend commit for %s' % old
2480 2487 node = commit(ui, repo, commitfunc, pats, opts)
2481 2488 finally:
2482 2489 repo._activebookmark = activebookmark
2483 2490 ui.callhooks = True
2484 2491 ctx = repo[node]
2485 2492
2486 2493 # Participating changesets:
2487 2494 #
2488 2495 # node/ctx o - new (intermediate) commit that contains changes
2489 2496 # | from working dir to go into amending commit
2490 2497 # | (or a workingctx if there were no changes)
2491 2498 # |
2492 2499 # old o - changeset to amend
2493 2500 # |
2494 2501 # base o - parent of amending changeset
2495 2502
2496 2503 # Update extra dict from amended commit (e.g. to preserve graft
2497 2504 # source)
2498 2505 extra.update(old.extra())
2499 2506
2500 2507 # Also update it from the intermediate commit or from the wctx
2501 2508 extra.update(ctx.extra())
2502 2509
2503 2510 if len(old.parents()) > 1:
2504 2511 # ctx.files() isn't reliable for merges, so fall back to the
2505 2512 # slower repo.status() method
2506 2513 files = set([fn for st in repo.status(base, old)[:3]
2507 2514 for fn in st])
2508 2515 else:
2509 2516 files = set(old.files())
2510 2517
2511 2518 # Second, we use either the commit we just did, or, if there were no
2512 2519 # changes, the parent of the working directory as the version of the
2513 2520 # files in the final amend commit
2514 2521 if node:
2515 2522 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2516 2523
2517 2524 user = ctx.user()
2518 2525 date = ctx.date()
2519 2526 # Recompute copies (avoid recording a -> b -> a)
2520 2527 copied = copies.pathcopies(base, ctx)
2521 2528 if old.p2():
2522 2529 copied.update(copies.pathcopies(old.p2(), ctx))
2523 2530
2524 2531 # Prune files which were reverted by the updates: if old
2525 2532 # introduced file X and our intermediate commit, node,
2526 2533 # renamed that file, then those two files are the same and
2527 2534 # we can discard X from our list of files. Likewise if X
2528 2535 # was deleted, it's no longer relevant
2529 2536 files.update(ctx.files())
2530 2537
2531 2538 def samefile(f):
2532 2539 if f in ctx.manifest():
2533 2540 a = ctx.filectx(f)
2534 2541 if f in base.manifest():
2535 2542 b = base.filectx(f)
2536 2543 return (not a.cmp(b)
2537 2544 and a.flags() == b.flags())
2538 2545 else:
2539 2546 return False
2540 2547 else:
2541 2548 return f not in base.manifest()
2542 2549 files = [f for f in files if not samefile(f)]
2543 2550
2544 2551 def filectxfn(repo, ctx_, path):
2545 2552 try:
2546 2553 fctx = ctx[path]
2547 2554 flags = fctx.flags()
2548 2555 mctx = context.memfilectx(repo,
2549 2556 fctx.path(), fctx.data(),
2550 2557 islink='l' in flags,
2551 2558 isexec='x' in flags,
2552 2559 copied=copied.get(path))
2553 2560 return mctx
2554 2561 except KeyError:
2555 2562 return None
2556 2563 else:
2557 2564 ui.note(_('copying changeset %s to %s\n') % (old, base))
2558 2565
2559 2566 # Use version of files as in the old cset
2560 2567 def filectxfn(repo, ctx_, path):
2561 2568 try:
2562 2569 return old.filectx(path)
2563 2570 except KeyError:
2564 2571 return None
2565 2572
2566 2573 user = opts.get('user') or old.user()
2567 2574 date = opts.get('date') or old.date()
2568 2575 editform = mergeeditform(old, 'commit.amend')
2569 2576 editor = getcommiteditor(editform=editform, **opts)
2570 2577 if not message:
2571 2578 editor = getcommiteditor(edit=True, editform=editform)
2572 2579 message = old.description()
2573 2580
2574 2581 pureextra = extra.copy()
2575 2582 extra['amend_source'] = old.hex()
2576 2583
2577 2584 new = context.memctx(repo,
2578 2585 parents=[base.node(), old.p2().node()],
2579 2586 text=message,
2580 2587 files=files,
2581 2588 filectxfn=filectxfn,
2582 2589 user=user,
2583 2590 date=date,
2584 2591 extra=extra,
2585 2592 editor=editor)
2586 2593
2587 2594 newdesc = changelog.stripdesc(new.description())
2588 2595 if ((not node)
2589 2596 and newdesc == old.description()
2590 2597 and user == old.user()
2591 2598 and date == old.date()
2592 2599 and pureextra == old.extra()):
2593 2600 # nothing changed. continuing here would create a new node
2594 2601 # anyway because of the amend_source noise.
2595 2602 #
2596 2603 # This is not what we expect from amend.
2597 2604 return old.node()
2598 2605
2599 2606 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2600 2607 try:
2601 2608 if opts.get('secret'):
2602 2609 commitphase = 'secret'
2603 2610 else:
2604 2611 commitphase = old.phase()
2605 2612 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2606 2613 newid = repo.commitctx(new)
2607 2614 finally:
2608 2615 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2609 2616 if newid != old.node():
2610 2617 # Reroute the working copy parent to the new changeset
2611 2618 repo.setparents(newid, nullid)
2612 2619
2613 2620 # Move bookmarks from old parent to amend commit
2614 2621 bms = repo.nodebookmarks(old.node())
2615 2622 if bms:
2616 2623 marks = repo._bookmarks
2617 2624 for bm in bms:
2618 2625 ui.debug('moving bookmarks %r from %s to %s\n' %
2619 2626 (marks, old.hex(), hex(newid)))
2620 2627 marks[bm] = newid
2621 2628 marks.recordchange(tr)
2622 2629 # commit the whole amend process
2623 2630 if createmarkers:
2624 2631 # mark the new changeset as successor of the rewritten one
2625 2632 new = repo[newid]
2626 2633 obs = [(old, (new,))]
2627 2634 if node:
2628 2635 obs.append((ctx, ()))
2629 2636
2630 2637 obsolete.createmarkers(repo, obs)
2631 2638 tr.close()
2632 2639 finally:
2633 2640 tr.release()
2634 2641 dsguard.close()
2635 2642 if not createmarkers and newid != old.node():
2636 2643 # Strip the intermediate commit (if there was one) and the amended
2637 2644 # commit
2638 2645 if node:
2639 2646 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2640 2647 ui.note(_('stripping amended changeset %s\n') % old)
2641 2648 repair.strip(ui, repo, old.node(), topic='amend-backup')
2642 2649 finally:
2643 2650 lockmod.release(lock, dsguard, wlock)
2644 2651 return newid
2645 2652
2646 2653 def commiteditor(repo, ctx, subs, editform=''):
2647 2654 if ctx.description():
2648 2655 return ctx.description()
2649 2656 return commitforceeditor(repo, ctx, subs, editform=editform)
2650 2657
2651 2658 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2652 2659 editform=''):
2653 2660 if not extramsg:
2654 2661 extramsg = _("Leave message empty to abort commit.")
2655 2662
2656 2663 forms = [e for e in editform.split('.') if e]
2657 2664 forms.insert(0, 'changeset')
2658 2665 while forms:
2659 2666 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2660 2667 if tmpl:
2661 2668 committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
2662 2669 break
2663 2670 forms.pop()
2664 2671 else:
2665 2672 committext = buildcommittext(repo, ctx, subs, extramsg)
2666 2673
2667 2674 # run editor in the repository root
2668 2675 olddir = os.getcwd()
2669 2676 os.chdir(repo.root)
2670 2677 text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
2671 2678 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2672 2679 os.chdir(olddir)
2673 2680
2674 2681 if finishdesc:
2675 2682 text = finishdesc(text)
2676 2683 if not text.strip():
2677 2684 raise util.Abort(_("empty commit message"))
2678 2685
2679 2686 return text
2680 2687
2681 2688 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2682 2689 ui = repo.ui
2683 2690 tmpl, mapfile = gettemplate(ui, tmpl, None)
2684 2691
2685 2692 try:
2686 2693 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2687 2694 except SyntaxError as inst:
2688 2695 raise util.Abort(inst.args[0])
2689 2696
2690 2697 for k, v in repo.ui.configitems('committemplate'):
2691 2698 if k != 'changeset':
2692 2699 t.t.cache[k] = v
2693 2700
2694 2701 if not extramsg:
2695 2702 extramsg = '' # ensure that extramsg is a string
2696 2703
2697 2704 ui.pushbuffer()
2698 2705 t.show(ctx, extramsg=extramsg)
2699 2706 return ui.popbuffer()
2700 2707
2701 2708 def buildcommittext(repo, ctx, subs, extramsg):
2702 2709 edittext = []
2703 2710 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2704 2711 if ctx.description():
2705 2712 edittext.append(ctx.description())
2706 2713 edittext.append("")
2707 2714 edittext.append("") # Empty line between message and comments.
2708 2715 edittext.append(_("HG: Enter commit message."
2709 2716 " Lines beginning with 'HG:' are removed."))
2710 2717 edittext.append("HG: %s" % extramsg)
2711 2718 edittext.append("HG: --")
2712 2719 edittext.append(_("HG: user: %s") % ctx.user())
2713 2720 if ctx.p2():
2714 2721 edittext.append(_("HG: branch merge"))
2715 2722 if ctx.branch():
2716 2723 edittext.append(_("HG: branch '%s'") % ctx.branch())
2717 2724 if bookmarks.isactivewdirparent(repo):
2718 2725 edittext.append(_("HG: bookmark '%s'") % repo._activebookmark)
2719 2726 edittext.extend([_("HG: subrepo %s") % s for s in subs])
2720 2727 edittext.extend([_("HG: added %s") % f for f in added])
2721 2728 edittext.extend([_("HG: changed %s") % f for f in modified])
2722 2729 edittext.extend([_("HG: removed %s") % f for f in removed])
2723 2730 if not added and not modified and not removed:
2724 2731 edittext.append(_("HG: no files changed"))
2725 2732 edittext.append("")
2726 2733
2727 2734 return "\n".join(edittext)
2728 2735
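# A minimal sketch of the editor text built above for an ordinary commit that
# modifies a single tracked file on the default branch (hypothetical user and
# file names). The message area comes first as blank lines, followed by
# roughly:
#
#   HG: Enter commit message. Lines beginning with 'HG:' are removed.
#   HG: Leave message empty to abort commit.
#   HG: --
#   HG: user: Alice <alice@example.org>
#   HG: branch 'default'
#   HG: changed README
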
2729 2736 def commitstatus(repo, node, branch, bheads=None, opts={}):
2730 2737 ctx = repo[node]
2731 2738 parents = ctx.parents()
2732 2739
2733 2740 if (not opts.get('amend') and bheads and node not in bheads and not
2734 2741 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2735 2742 repo.ui.status(_('created new head\n'))
2736 2743 # The message is not printed for initial roots. For the other
2737 2744 # changesets, it is printed in the following situations:
2738 2745 #
2739 2746 # Par column: for the 2 parents with ...
2740 2747 # N: null or no parent
2741 2748 # B: parent is on another named branch
2742 2749 # C: parent is a regular non head changeset
2743 2750 # H: parent was a branch head of the current branch
2744 2751 # Msg column: whether we print "created new head" message
2745 2752 # In the following, it is assumed that there already exist some
2746 2753 # initial branch heads of the current branch; otherwise nothing is
2747 2754 # printed anyway.
2748 2755 #
2749 2756 # Par Msg Comment
2750 2757 # N N y additional topo root
2751 2758 #
2752 2759 # B N y additional branch root
2753 2760 # C N y additional topo head
2754 2761 # H N n usual case
2755 2762 #
2756 2763 # B B y weird additional branch root
2757 2764 # C B y branch merge
2758 2765 # H B n merge with named branch
2759 2766 #
2760 2767 # C C y additional head from merge
2761 2768 # C H n merge with a head
2762 2769 #
2763 2770 # H H n head merge: head count decreases
2764 2771
2765 2772 if not opts.get('close_branch'):
2766 2773 for r in parents:
2767 2774 if r.closesbranch() and r.branch() == branch:
2768 2775 repo.ui.status(_('reopening closed branch head %d\n') % r)
2769 2776
2770 2777 if repo.ui.debugflag:
2771 2778 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2772 2779 elif repo.ui.verbose:
2773 2780 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2774 2781
2775 2782 def revert(ui, repo, ctx, parents, *pats, **opts):
2776 2783 parent, p2 = parents
2777 2784 node = ctx.node()
2778 2785
2779 2786 mf = ctx.manifest()
2780 2787 if node == p2:
2781 2788 parent = p2
2782 2789 if node == parent:
2783 2790 pmf = mf
2784 2791 else:
2785 2792 pmf = None
2786 2793
2787 2794 # need all matching names in dirstate and manifest of target rev,
2788 2795 # so have to walk both. do not print errors if files exist in one
2789 2796 # but not the other. in both cases, filesets should be evaluated against
2790 2797 # workingctx to get consistent result (issue4497). this means 'set:**'
2791 2798 # cannot be used to select missing files from target rev.
2792 2799
2793 2800 # `names` is a mapping for all elements in working copy and target revision
2794 2801 # The mapping is in the form:
2795 2802 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2796 2803 names = {}
2797 2804
2798 2805 wlock = repo.wlock()
2799 2806 try:
2800 2807 ## filling of the `names` mapping
2801 2808 # walk dirstate to fill `names`
2802 2809
2803 2810 interactive = opts.get('interactive', False)
2804 2811 wctx = repo[None]
2805 2812 m = scmutil.match(wctx, pats, opts)
2806 2813
2807 2814 # we'll need this later
2808 2815 targetsubs = sorted(s for s in wctx.substate if m(s))
2809 2816
2810 2817 if not m.always():
2811 2818 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2812 2819 names[abs] = m.rel(abs), m.exact(abs)
2813 2820
2814 2821 # walk target manifest to fill `names`
2815 2822
2816 2823 def badfn(path, msg):
2817 2824 if path in names:
2818 2825 return
2819 2826 if path in ctx.substate:
2820 2827 return
2821 2828 path_ = path + '/'
2822 2829 for f in names:
2823 2830 if f.startswith(path_):
2824 2831 return
2825 2832 ui.warn("%s: %s\n" % (m.rel(path), msg))
2826 2833
2827 2834 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2828 2835 if abs not in names:
2829 2836 names[abs] = m.rel(abs), m.exact(abs)
2830 2837
2831 2838 # Find status of all files in `names`.
2832 2839 m = scmutil.matchfiles(repo, names)
2833 2840
2834 2841 changes = repo.status(node1=node, match=m,
2835 2842 unknown=True, ignored=True, clean=True)
2836 2843 else:
2837 2844 changes = repo.status(node1=node, match=m)
2838 2845 for kind in changes:
2839 2846 for abs in kind:
2840 2847 names[abs] = m.rel(abs), m.exact(abs)
2841 2848
2842 2849 m = scmutil.matchfiles(repo, names)
2843 2850
2844 2851 modified = set(changes.modified)
2845 2852 added = set(changes.added)
2846 2853 removed = set(changes.removed)
2847 2854 _deleted = set(changes.deleted)
2848 2855 unknown = set(changes.unknown)
2849 2856 unknown.update(changes.ignored)
2850 2857 clean = set(changes.clean)
2851 2858 modadded = set()
2852 2859
2853 2860 # split between files known in target manifest and the others
2854 2861 smf = set(mf)
2855 2862
2856 2863 # determine the exact nature of the deleted files
2857 2864 deladded = _deleted - smf
2858 2865 deleted = _deleted - deladded
2859 2866
2860 2867 # We need to account for the state of the file in the dirstate,
2861 2868 # even when we revert against something other than the parent. This will
2862 2869 # slightly alter the behavior of revert (doing a backup or not, delete
2863 2870 # or just forget, etc).
2864 2871 if parent == node:
2865 2872 dsmodified = modified
2866 2873 dsadded = added
2867 2874 dsremoved = removed
2868 2875 # store all local modifications, useful later for rename detection
2869 2876 localchanges = dsmodified | dsadded
2870 2877 modified, added, removed = set(), set(), set()
2871 2878 else:
2872 2879 changes = repo.status(node1=parent, match=m)
2873 2880 dsmodified = set(changes.modified)
2874 2881 dsadded = set(changes.added)
2875 2882 dsremoved = set(changes.removed)
2876 2883 # store all local modifications, useful later for rename detection
2877 2884 localchanges = dsmodified | dsadded
2878 2885
2879 2886 # only take removes between wc and target into account
2880 2887 clean |= dsremoved - removed
2881 2888 dsremoved &= removed
2882 2889 # distinguish between dirstate removes and the others
2883 2890 removed -= dsremoved
2884 2891
2885 2892 modadded = added & dsmodified
2886 2893 added -= modadded
2887 2894
2888 2895 # tell newly modified files apart.
2889 2896 dsmodified &= modified
2890 2897 dsmodified |= modified & dsadded # dirstate added may need backup
2891 2898 modified -= dsmodified
2892 2899
2893 2900 # We need to wait for some post-processing to update this set
2894 2901 # before making the distinction. The dirstate will be used for
2895 2902 # that purpose.
2896 2903 dsadded = added
2897 2904
2898 2905 # in case of merge, files that are actually added can be reported as
2899 2906 # modified; we need to post-process the result
2900 2907 if p2 != nullid:
2901 2908 if pmf is None:
2902 2909 # only need parent manifest in the merge case,
2903 2910 # so do not read by default
2904 2911 pmf = repo[parent].manifest()
2905 2912 mergeadd = dsmodified - set(pmf)
2906 2913 dsadded |= mergeadd
2907 2914 dsmodified -= mergeadd
2908 2915
2909 2916 # if f is a rename, update `names` to also revert the source
2910 2917 cwd = repo.getcwd()
2911 2918 for f in localchanges:
2912 2919 src = repo.dirstate.copied(f)
2913 2920 # XXX should we check for rename down to target node?
2914 2921 if src and src not in names and repo.dirstate[src] == 'r':
2915 2922 dsremoved.add(src)
2916 2923 names[src] = (repo.pathto(src, cwd), True)
2917 2924
2918 2925 # distinguish between files to forget and the others
2919 2926 added = set()
2920 2927 for abs in dsadded:
2921 2928 if repo.dirstate[abs] != 'a':
2922 2929 added.add(abs)
2923 2930 dsadded -= added
2924 2931
2925 2932 for abs in deladded:
2926 2933 if repo.dirstate[abs] == 'a':
2927 2934 dsadded.add(abs)
2928 2935 deladded -= dsadded
2929 2936
2930 2937 # For files marked as removed, we check if an unknown file is present at
2931 2938 # the same path. If such a file exists it may need to be backed up.
2932 2939 # Making the distinction at this stage helps keep the backup logic
2933 2940 # simpler.
2934 2941 removunk = set()
2935 2942 for abs in removed:
2936 2943 target = repo.wjoin(abs)
2937 2944 if os.path.lexists(target):
2938 2945 removunk.add(abs)
2939 2946 removed -= removunk
2940 2947
2941 2948 dsremovunk = set()
2942 2949 for abs in dsremoved:
2943 2950 target = repo.wjoin(abs)
2944 2951 if os.path.lexists(target):
2945 2952 dsremovunk.add(abs)
2946 2953 dsremoved -= dsremovunk
2947 2954
2948 2955 # actions to be actually performed by revert
2949 2956 # (<list of files>, <message>) tuple
2950 2957 actions = {'revert': ([], _('reverting %s\n')),
2951 2958 'add': ([], _('adding %s\n')),
2952 2959 'remove': ([], _('removing %s\n')),
2953 2960 'drop': ([], _('removing %s\n')),
2954 2961 'forget': ([], _('forgetting %s\n')),
2955 2962 'undelete': ([], _('undeleting %s\n')),
2956 2963 'noop': (None, _('no changes needed to %s\n')),
2957 2964 'unknown': (None, _('file not managed: %s\n')),
2958 2965 }
2959 2966
2960 2967 # "constants" that convey the backup strategy.
2961 2968 # All are set to `discard` if `no-backup` is set, to avoid checking
2962 2969 # no_backup lower in the code.
2963 2970 # These values are ordered for comparison purposes
2964 2971 backup = 2 # unconditionally do backup
2965 2972 check = 1 # check if the existing file differs from target
2966 2973 discard = 0 # never do backup
2967 2974 if opts.get('no_backup'):
2968 2975 backup = check = discard
2969 2976
2970 2977 backupanddel = actions['remove']
2971 2978 if not opts.get('no_backup'):
2972 2979 backupanddel = actions['drop']
2973 2980
2974 2981 disptable = (
2975 2982 # dispatch table:
2976 2983 # file state
2977 2984 # action
2978 2985 # make backup
2979 2986
2980 2987 ## Sets that result in changes to files on disk
2981 2988 # Modified compared to target, no local change
2982 2989 (modified, actions['revert'], discard),
2983 2990 # Modified compared to target, but local file is deleted
2984 2991 (deleted, actions['revert'], discard),
2985 2992 # Modified compared to target, local change
2986 2993 (dsmodified, actions['revert'], backup),
2987 2994 # Added since target
2988 2995 (added, actions['remove'], discard),
2989 2996 # Added in working directory
2990 2997 (dsadded, actions['forget'], discard),
2991 2998 # Added since target, have local modification
2992 2999 (modadded, backupanddel, backup),
2993 3000 # Added since target but file is missing in working directory
2994 3001 (deladded, actions['drop'], discard),
2995 3002 # Removed since target, before working copy parent
2996 3003 (removed, actions['add'], discard),
2997 3004 # Same as `removed` but an unknown file exists at the same path
2998 3005 (removunk, actions['add'], check),
2999 3006 # Removed since target, marked as such in working copy parent
3000 3007 (dsremoved, actions['undelete'], discard),
3001 3008 # Same as `dsremoved` but an unknown file exists at the same path
3002 3009 (dsremovunk, actions['undelete'], check),
3003 3010 ## the following sets do not result in any file changes
3004 3011 # File with no modification
3005 3012 (clean, actions['noop'], discard),
3006 3013 # Existing file, not tracked anywhere
3007 3014 (unknown, actions['unknown'], discard),
3008 3015 )
3009 3016
3010 3017 for abs, (rel, exact) in sorted(names.items()):
3011 3018 # target file to be touched on disk (relative to cwd)
3012 3019 target = repo.wjoin(abs)
3013 3020 # search the entry in the dispatch table.
3014 3021 # if the file is in any of these sets, it was touched in the working
3015 3022 # directory parent and we are sure it needs to be reverted.
3016 3023 for table, (xlist, msg), dobackup in disptable:
3017 3024 if abs not in table:
3018 3025 continue
3019 3026 if xlist is not None:
3020 3027 xlist.append(abs)
3021 3028 if dobackup and (backup <= dobackup
3022 3029 or wctx[abs].cmp(ctx[abs])):
3023 3030 bakname = "%s.orig" % rel
3024 3031 ui.note(_('saving current version of %s as %s\n') %
3025 3032 (rel, bakname))
3026 3033 if not opts.get('dry_run'):
3027 3034 if interactive:
3028 3035 util.copyfile(target, bakname)
3029 3036 else:
3030 3037 util.rename(target, bakname)
3031 3038 if ui.verbose or not exact:
3032 3039 if not isinstance(msg, basestring):
3033 3040 msg = msg(abs)
3034 3041 ui.status(msg % rel)
3035 3042 elif exact:
3036 3043 ui.warn(msg % rel)
3037 3044 break
3038 3045
3039 3046 if not opts.get('dry_run'):
3040 3047 needdata = ('revert', 'add', 'undelete')
3041 3048 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3042 3049 _performrevert(repo, parents, ctx, actions, interactive)
3043 3050
3044 3051 if targetsubs:
3045 3052 # Revert the subrepos on the revert list
3046 3053 for sub in targetsubs:
3047 3054 try:
3048 3055 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3049 3056 except KeyError:
3050 3057 raise util.Abort("subrepository '%s' does not exist in %s!"
3051 3058 % (sub, short(ctx.node())))
3052 3059 finally:
3053 3060 wlock.release()
3054 3061
3055 3062 def _revertprefetch(repo, ctx, *files):
3056 3063 """Let extensions changing the storage layer prefetch content"""
3057 3064 pass
3058 3065
3059 3066 def _performrevert(repo, parents, ctx, actions, interactive=False):
3060 3067 """function that actually performs all the actions computed for revert
3061 3068
3062 3069 This is an independent function to let extensions plug in and react to
3063 3070 the imminent revert.
3064 3071
3065 3072 Make sure you have the working directory locked when calling this function.
3066 3073 """
3067 3074 parent, p2 = parents
3068 3075 node = ctx.node()
3069 3076 def checkout(f):
3070 3077 fc = ctx[f]
3071 3078 repo.wwrite(f, fc.data(), fc.flags())
3072 3079
3073 3080 audit_path = pathutil.pathauditor(repo.root)
3074 3081 for f in actions['forget'][0]:
3075 3082 repo.dirstate.drop(f)
3076 3083 for f in actions['remove'][0]:
3077 3084 audit_path(f)
3078 3085 try:
3079 3086 util.unlinkpath(repo.wjoin(f))
3080 3087 except OSError:
3081 3088 pass
3082 3089 repo.dirstate.remove(f)
3083 3090 for f in actions['drop'][0]:
3084 3091 audit_path(f)
3085 3092 repo.dirstate.remove(f)
3086 3093
3087 3094 normal = None
3088 3095 if node == parent:
3089 3096 # We're reverting to our parent. If possible, we'd like status
3090 3097 # to report the file as clean. We have to use normallookup for
3091 3098 # merges to avoid losing information about merged/dirty files.
3092 3099 if p2 != nullid:
3093 3100 normal = repo.dirstate.normallookup
3094 3101 else:
3095 3102 normal = repo.dirstate.normal
3096 3103
3097 3104 newlyaddedandmodifiedfiles = set()
3098 3105 if interactive:
3099 3106 # Prompt the user for changes to revert
3100 3107 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3101 3108 m = scmutil.match(ctx, torevert, {})
3102 3109 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3103 3110 diffopts.nodates = True
3104 3111 diffopts.git = True
3105 3112 reversehunks = repo.ui.configbool('experimental',
3106 3113 'revertalternateinteractivemode',
3107 3114 True)
3108 3115 if reversehunks:
3109 3116 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3110 3117 else:
3111 3118 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3112 3119 originalchunks = patch.parsepatch(diff)
3113 3120
3114 3121 try:
3115 3122
3116 3123 chunks = recordfilter(repo.ui, originalchunks)
3117 3124 if reversehunks:
3118 3125 chunks = patch.reversehunks(chunks)
3119 3126
3120 3127 except patch.PatchError as err:
3121 3128 raise util.Abort(_('error parsing patch: %s') % err)
3122 3129
3123 3130 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3124 3131 # Apply changes
3125 3132 fp = cStringIO.StringIO()
3126 3133 for c in chunks:
3127 3134 c.write(fp)
3128 3135 dopatch = fp.tell()
3129 3136 fp.seek(0)
3130 3137 if dopatch:
3131 3138 try:
3132 3139 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3133 3140 except patch.PatchError as err:
3134 3141 raise util.Abort(str(err))
3135 3142 del fp
3136 3143 else:
3137 3144 for f in actions['revert'][0]:
3138 3145 checkout(f)
3139 3146 if normal:
3140 3147 normal(f)
3141 3148
3142 3149 for f in actions['add'][0]:
3143 3150 # Don't check out modified files; they are already created by the diff
3144 3151 if f not in newlyaddedandmodifiedfiles:
3145 3152 checkout(f)
3146 3153 repo.dirstate.add(f)
3147 3154
3148 3155 normal = repo.dirstate.normallookup
3149 3156 if node == parent and p2 == nullid:
3150 3157 normal = repo.dirstate.normal
3151 3158 for f in actions['undelete'][0]:
3152 3159 checkout(f)
3153 3160 normal(f)
3154 3161
3155 3162 copied = copies.pathcopies(repo[parent], ctx)
3156 3163
3157 3164 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3158 3165 if f in copied:
3159 3166 repo.dirstate.copy(copied[f], f)
3160 3167
3161 3168 def command(table):
3162 3169 """Returns a function object to be used as a decorator for making commands.
3163 3170
3164 3171 This function receives a command table as its argument. The table should
3165 3172 be a dict.
3166 3173
3167 3174 The returned function can be used as a decorator for adding commands
3168 3175 to that command table. This function accepts multiple arguments to define
3169 3176 a command.
3170 3177
3171 3178 The first argument is the command name.
3172 3179
3173 3180 The options argument is an iterable of tuples defining command arguments.
3174 3181 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3175 3182
3176 3183 The synopsis argument defines a short, one line summary of how to use the
3177 3184 command. This shows up in the help output.
3178 3185
3179 3186 The norepo argument defines whether the command does not require a
3180 3187 local repository. Most commands operate against a repository, thus the
3181 3188 default is False.
3182 3189
3183 3190 The optionalrepo argument defines whether the command optionally requires
3184 3191 a local repository.
3185 3192
3186 3193 The inferrepo argument defines whether to try to find a repository from the
3187 3194 command line arguments. If True, arguments will be examined for potential
3188 3195 repository locations. See ``findrepo()``. If a repository is found, it
3189 3196 will be used.
3190 3197 """
3191 3198 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3192 3199 inferrepo=False):
3193 3200 def decorator(func):
3194 3201 if synopsis:
3195 3202 table[name] = func, list(options), synopsis
3196 3203 else:
3197 3204 table[name] = func, list(options)
3198 3205
3199 3206 if norepo:
3200 3207 # Avoid import cycle.
3201 3208 import commands
3202 3209 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3203 3210
3204 3211 if optionalrepo:
3205 3212 import commands
3206 3213 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3207 3214
3208 3215 if inferrepo:
3209 3216 import commands
3210 3217 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3211 3218
3212 3219 return func
3213 3220 return decorator
3214 3221
3215 3222 return cmd
3216 3223
3217 3224 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3218 3225 # commands.outgoing. "missing" is "missing" of the result of
3219 3226 # "findcommonoutgoing()"
3220 3227 outgoinghooks = util.hooks()
3221 3228
3222 3229 # a list of (ui, repo) functions called by commands.summary
3223 3230 summaryhooks = util.hooks()
3224 3231
3225 3232 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3226 3233 #
3227 3234 # functions should return a tuple of booleans below, if 'changes' is None:
3228 3235 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3229 3236 #
3230 3237 # otherwise, 'changes' is a tuple of tuples below:
3231 3238 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3232 3239 # - (desturl, destbranch, destpeer, outgoing)
3233 3240 summaryremotehooks = util.hooks()
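# A hedged sketch of how an extension might register a summaryremotehooks
# callback matching the contract described above; the function name and the
# output line are illustrative only:
#
#   def remotesummary(ui, repo, opts, changes):
#       if changes is None:
#           # ask commands.summary to compute both incomings and outgoings
#           return (True, True)
#       (sourceurl, sourcebranch, sourcepeer, incoming) = changes[0]
#       (desturl, destbranch, destpeer, outgoing) = changes[1]
#       ui.status('remote destination: %s\n' % desturl)
#
#   cmdutil.summaryremotehooks.add('myextension', remotesummary)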
3234 3241
3235 3242 # A list of state files kept by multistep operations like graft.
3236 3243 # Since graft cannot be aborted, it is considered 'clearable' by update.
3237 3244 # note: bisect is intentionally excluded
3238 3245 # (state file, clearable, allowcommit, error, hint)
3239 3246 unfinishedstates = [
3240 3247 ('graftstate', True, False, _('graft in progress'),
3241 3248 _("use 'hg graft --continue' or 'hg update' to abort")),
3242 3249 ('updatestate', True, False, _('last update was interrupted'),
3243 3250 _("use 'hg update' to get a consistent checkout"))
3244 3251 ]
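# A hedged sketch of how an extension with its own multistep state file might
# extend this list; the state file name and messages are illustrative:
#
#   cmdutil.unfinishedstates.append(
#       ('myextstate', False, True, _('myext operation in progress'),
#        _("use 'hg myext --continue' or 'hg myext --abort'")))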
3245 3252
3246 3253 def checkunfinished(repo, commit=False):
3247 3254 '''Look for an unfinished multistep operation, like graft, and abort
3248 3255 if found. It's probably good to check this right before
3249 3256 bailifchanged().
3250 3257 '''
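# A minimal sketch of the usual call site inside a command, following the
# docstring above (illustrative only):
#
#   cmdutil.checkunfinished(repo)
#   cmdutil.bailifchanged(repo)
#   # ... start the multistep operation ...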
3251 3258 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3252 3259 if commit and allowcommit:
3253 3260 continue
3254 3261 if repo.vfs.exists(f):
3255 3262 raise util.Abort(msg, hint=hint)
3256 3263
3257 3264 def clearunfinished(repo):
3258 3265 '''Check for unfinished operations (as above), and clear the ones
3259 3266 that are clearable.
3260 3267 '''
3261 3268 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3262 3269 if not clearable and repo.vfs.exists(f):
3263 3270 raise util.Abort(msg, hint=hint)
3264 3271 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3265 3272 if clearable and repo.vfs.exists(f):
3266 3273 util.unlink(repo.join(f))
3267 3274
3268 3275 class dirstateguard(object):
3269 3276 '''Restore dirstate on unexpected failure.
3270 3277
3271 3278 At construction, this class does:
3272 3279
3273 3280 - write current ``repo.dirstate`` out, and
3274 3281 - save ``.hg/dirstate`` into the backup file
3275 3282
3276 3283 This restores ``.hg/dirstate`` from the backup file, if ``release()``
3277 3284 is invoked before ``close()``.
3278 3285
3279 3286 If ``close()`` is invoked before ``release()``, this just removes the backup file.
3280 3287 '''
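# A minimal sketch of the intended usage, per the docstring above ('mycommand'
# is an illustrative name):
#
#   dsguard = dirstateguard(repo, 'mycommand')
#   try:
#       # ... operations that may modify the dirstate ...
#       dsguard.close()    # success: discard the backup
#   finally:
#       dsguard.release()  # on failure, restore .hg/dirstate from the backup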
3281 3288
3282 3289 def __init__(self, repo, name):
3283 3290 repo.dirstate.write()
3284 3291 self._repo = repo
3285 3292 self._filename = 'dirstate.backup.%s.%d' % (name, id(self))
3286 3293 repo.vfs.write(self._filename, repo.vfs.tryread('dirstate'))
3287 3294 self._active = True
3288 3295 self._closed = False
3289 3296
3290 3297 def __del__(self):
3291 3298 if self._active: # still active
3292 3299 # this may occur, even if this class is used correctly:
3293 3300 # for example, releasing other resources like a transaction
3294 3301 # may raise an exception before ``dirstateguard.release`` in
3295 3302 # ``release(tr, ....)``.
3296 3303 self._abort()
3297 3304
3298 3305 def close(self):
3299 3306 if not self._active: # already inactivated
3300 3307 msg = (_("can't close already inactivated backup: %s")
3301 3308 % self._filename)
3302 3309 raise util.Abort(msg)
3303 3310
3304 3311 self._repo.vfs.unlink(self._filename)
3305 3312 self._active = False
3306 3313 self._closed = True
3307 3314
3308 3315 def _abort(self):
3309 3316 # this "invalidate()" prevents "wlock.release()" from writing
3310 3317 # dirstate changes out after restoring to the original status
3311 3318 self._repo.dirstate.invalidate()
3312 3319
3313 3320 self._repo.vfs.rename(self._filename, 'dirstate')
3314 3321 self._active = False
3315 3322
3316 3323 def release(self):
3317 3324 if not self._closed:
3318 3325 if not self._active: # already inactivated
3319 3326 msg = (_("can't release already inactivated backup: %s")
3320 3327 % self._filename)
3321 3328 raise util.Abort(msg)
3322 3329 self._abort()