cmdutil: do not duplicate stdout by makefileobj()...
Yuya Nishihara
r27419:7e2495bf default
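
For context, the hunk below removes the os.dup()/os.fdopen() branch from makefileobj() and always hands back the _unclosablefile wrapper defined earlier in this file, so writing to '-' reuses ui.fout/ui.fin directly instead of a second file object layered over a duplicated descriptor. A minimal standalone sketch of that wrapper pattern follows (UnclosableFile is a hypothetical stand-in, not Mercurial's class):

import sys

class UnclosableFile(object):
    """Forward everything to the wrapped stream, but make close() a no-op."""
    def __init__(self, fp):
        self._fp = fp

    def close(self):
        # callers may close() the returned object without closing the real stream
        pass

    def __iter__(self):
        return iter(self._fp)

    def __getattr__(self, attr):
        # delegate write(), flush(), etc. to the wrapped stream
        return getattr(self._fp, attr)

out = UnclosableFile(sys.stdout)
out.write('export output goes to the original stream\n')
out.close()                      # no-op; sys.stdout is still usable afterwards
sys.stdout.write('still open\n')

With this shape, a caller such as export() can close() whatever makefileobj() returned for stdout unconditionally without affecting the underlying stream.
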
@@ -1,3421 +1,3416 @@
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import formatter
18 18 import crecord as crecordmod
19 19 import lock as lockmod
20 20
21 21 def ishunk(x):
22 22 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
23 23 return isinstance(x, hunkclasses)
24 24
25 25 def newandmodified(chunks, originalchunks):
26 26 newlyaddedandmodifiedfiles = set()
27 27 for chunk in chunks:
28 28 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
29 29 originalchunks:
30 30 newlyaddedandmodifiedfiles.add(chunk.header.filename())
31 31 return newlyaddedandmodifiedfiles
32 32
33 33 def parsealiases(cmd):
34 34 return cmd.lstrip("^").split("|")
35 35
36 36 def setupwrapcolorwrite(ui):
37 37 # wrap ui.write so diff output can be labeled/colorized
38 38 def wrapwrite(orig, *args, **kw):
39 39 label = kw.pop('label', '')
40 40 for chunk, l in patch.difflabel(lambda: args):
41 41 orig(chunk, label=label + l)
42 42
43 43 oldwrite = ui.write
44 44 def wrap(*args, **kwargs):
45 45 return wrapwrite(oldwrite, *args, **kwargs)
46 46 setattr(ui, 'write', wrap)
47 47 return oldwrite
48 48
49 49 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
50 50 if usecurses:
51 51 if testfile:
52 52 recordfn = crecordmod.testdecorator(testfile,
53 53 crecordmod.testchunkselector)
54 54 else:
55 55 recordfn = crecordmod.chunkselector
56 56
57 57 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
58 58
59 59 else:
60 60 return patch.filterpatch(ui, originalhunks, operation)
61 61
62 62 def recordfilter(ui, originalhunks, operation=None):
63 63 """ Prompts the user to filter the originalhunks and return a list of
64 64 selected hunks.
65 65 *operation* is used for ui purposes to indicate the user
66 66 what kind of filtering they are doing: reverting, committing, shelving, etc.
67 67 *operation* has to be a translated string.
68 68 """
69 69 usecurses = ui.configbool('experimental', 'crecord', False)
70 70 testfile = ui.config('experimental', 'crecordtest', None)
71 71 oldwrite = setupwrapcolorwrite(ui)
72 72 try:
73 73 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
74 74 testfile, operation)
75 75 finally:
76 76 ui.write = oldwrite
77 77 return newchunks, newopts
78 78
79 79 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
80 80 filterfn, *pats, **opts):
81 81 import merge as mergemod
82 82
83 83 if not ui.interactive():
84 84 if cmdsuggest:
85 85 msg = _('running non-interactively, use %s instead') % cmdsuggest
86 86 else:
87 87 msg = _('running non-interactively')
88 88 raise error.Abort(msg)
89 89
90 90 # make sure username is set before going interactive
91 91 if not opts.get('user'):
92 92 ui.username() # raise exception, username not provided
93 93
94 94 def recordfunc(ui, repo, message, match, opts):
95 95 """This is generic record driver.
96 96
97 97 Its job is to interactively filter local changes, and
98 98 accordingly prepare the working directory into a state in which the
99 99 job can be delegated to a non-interactive commit command such as
100 100 'commit' or 'qrefresh'.
101 101
102 102 After the actual job is done by non-interactive command, the
103 103 working directory is restored to its original state.
104 104
105 105 In the end we'll record interesting changes, and everything else
106 106 will be left in place, so the user can continue working.
107 107 """
108 108
109 109 checkunfinished(repo, commit=True)
110 110 merge = len(repo[None].parents()) > 1
111 111 if merge:
112 112 raise error.Abort(_('cannot partially commit a merge '
113 113 '(use "hg commit" instead)'))
114 114
115 115 status = repo.status(match=match)
116 116 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
117 117 diffopts.nodates = True
118 118 diffopts.git = True
119 119 diffopts.showfunc = True
120 120 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
121 121 originalchunks = patch.parsepatch(originaldiff)
122 122
123 123 # 1. filter the patch, so we have the intending-to-apply subset of it
124 124 try:
125 125 chunks, newopts = filterfn(ui, originalchunks)
126 126 except patch.PatchError as err:
127 127 raise error.Abort(_('error parsing patch: %s') % err)
128 128 opts.update(newopts)
129 129
130 130 # We need to keep a backup of files that have been newly added and
131 131 # modified during the recording process because there is a previous
132 132 # version without the edit in the workdir
133 133 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
134 134 contenders = set()
135 135 for h in chunks:
136 136 try:
137 137 contenders.update(set(h.files()))
138 138 except AttributeError:
139 139 pass
140 140
141 141 changed = status.modified + status.added + status.removed
142 142 newfiles = [f for f in changed if f in contenders]
143 143 if not newfiles:
144 144 ui.status(_('no changes to record\n'))
145 145 return 0
146 146
147 147 modified = set(status.modified)
148 148
149 149 # 2. backup changed files, so we can restore them in the end
150 150
151 151 if backupall:
152 152 tobackup = changed
153 153 else:
154 154 tobackup = [f for f in newfiles if f in modified or f in \
155 155 newlyaddedandmodifiedfiles]
156 156 backups = {}
157 157 if tobackup:
158 158 backupdir = repo.join('record-backups')
159 159 try:
160 160 os.mkdir(backupdir)
161 161 except OSError as err:
162 162 if err.errno != errno.EEXIST:
163 163 raise
164 164 try:
165 165 # backup continues
166 166 for f in tobackup:
167 167 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
168 168 dir=backupdir)
169 169 os.close(fd)
170 170 ui.debug('backup %r as %r\n' % (f, tmpname))
171 171 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
172 172 backups[f] = tmpname
173 173
174 174 fp = cStringIO.StringIO()
175 175 for c in chunks:
176 176 fname = c.filename()
177 177 if fname in backups:
178 178 c.write(fp)
179 179 dopatch = fp.tell()
180 180 fp.seek(0)
181 181
182 182 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
183 183 # 3a. apply filtered patch to clean repo (clean)
184 184 if backups:
185 185 # Equivalent to hg.revert
186 186 m = scmutil.matchfiles(repo, backups.keys())
187 187 mergemod.update(repo, repo.dirstate.p1(),
188 188 False, True, matcher=m)
189 189
190 190 # 3b. (apply)
191 191 if dopatch:
192 192 try:
193 193 ui.debug('applying patch\n')
194 194 ui.debug(fp.getvalue())
195 195 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
196 196 except patch.PatchError as err:
197 197 raise error.Abort(str(err))
198 198 del fp
199 199
200 200 # 4. We prepared the working directory according to the filtered
201 201 # patch. Now is the time to delegate the job to
202 202 # commit/qrefresh or the like!
203 203
204 204 # Make all of the pathnames absolute.
205 205 newfiles = [repo.wjoin(nf) for nf in newfiles]
206 206 return commitfunc(ui, repo, *newfiles, **opts)
207 207 finally:
208 208 # 5. finally restore backed-up files
209 209 try:
210 210 dirstate = repo.dirstate
211 211 for realname, tmpname in backups.iteritems():
212 212 ui.debug('restoring %r to %r\n' % (tmpname, realname))
213 213
214 214 if dirstate[realname] == 'n':
215 215 # without normallookup, restoring timestamp
216 216 # may cause partially committed files
217 217 # to be treated as unmodified
218 218 dirstate.normallookup(realname)
219 219
220 220 # copystat=True here and above are a hack to trick any
221 221 # editors that have f open into thinking we haven't modified them.
222 222 #
223 223 # Also note that this is racy, as an editor could notice the
224 224 # file's mtime before we've finished writing it.
225 225 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
226 226 os.unlink(tmpname)
227 227 if tobackup:
228 228 os.rmdir(backupdir)
229 229 except OSError:
230 230 pass
231 231
232 232 def recordinwlock(ui, repo, message, match, opts):
233 233 wlock = repo.wlock()
234 234 try:
235 235 return recordfunc(ui, repo, message, match, opts)
236 236 finally:
237 237 wlock.release()
238 238
239 239 return commit(ui, repo, recordinwlock, pats, opts)
240 240
241 241 def findpossible(cmd, table, strict=False):
242 242 """
243 243 Return cmd -> (aliases, command table entry)
244 244 for each matching command.
245 245 Return debug commands (or their aliases) only if no normal command matches.
246 246 """
247 247 choice = {}
248 248 debugchoice = {}
249 249
250 250 if cmd in table:
251 251 # short-circuit exact matches, "log" alias beats "^log|history"
252 252 keys = [cmd]
253 253 else:
254 254 keys = table.keys()
255 255
256 256 allcmds = []
257 257 for e in keys:
258 258 aliases = parsealiases(e)
259 259 allcmds.extend(aliases)
260 260 found = None
261 261 if cmd in aliases:
262 262 found = cmd
263 263 elif not strict:
264 264 for a in aliases:
265 265 if a.startswith(cmd):
266 266 found = a
267 267 break
268 268 if found is not None:
269 269 if aliases[0].startswith("debug") or found.startswith("debug"):
270 270 debugchoice[found] = (aliases, table[e])
271 271 else:
272 272 choice[found] = (aliases, table[e])
273 273
274 274 if not choice and debugchoice:
275 275 choice = debugchoice
276 276
277 277 return choice, allcmds
278 278
279 279 def findcmd(cmd, table, strict=True):
280 280 """Return (aliases, command table entry) for command string."""
281 281 choice, allcmds = findpossible(cmd, table, strict)
282 282
283 283 if cmd in choice:
284 284 return choice[cmd]
285 285
286 286 if len(choice) > 1:
287 287 clist = choice.keys()
288 288 clist.sort()
289 289 raise error.AmbiguousCommand(cmd, clist)
290 290
291 291 if choice:
292 292 return choice.values()[0]
293 293
294 294 raise error.UnknownCommand(cmd, allcmds)
295 295
296 296 def findrepo(p):
297 297 while not os.path.isdir(os.path.join(p, ".hg")):
298 298 oldp, p = p, os.path.dirname(p)
299 299 if p == oldp:
300 300 return None
301 301
302 302 return p
303 303
304 304 def bailifchanged(repo, merge=True):
305 305 if merge and repo.dirstate.p2() != nullid:
306 306 raise error.Abort(_('outstanding uncommitted merge'))
307 307 modified, added, removed, deleted = repo.status()[:4]
308 308 if modified or added or removed or deleted:
309 309 raise error.Abort(_('uncommitted changes'))
310 310 ctx = repo[None]
311 311 for s in sorted(ctx.substate):
312 312 ctx.sub(s).bailifchanged()
313 313
314 314 def logmessage(ui, opts):
315 315 """ get the log message according to -m and -l option """
316 316 message = opts.get('message')
317 317 logfile = opts.get('logfile')
318 318
319 319 if message and logfile:
320 320 raise error.Abort(_('options --message and --logfile are mutually '
321 321 'exclusive'))
322 322 if not message and logfile:
323 323 try:
324 324 if logfile == '-':
325 325 message = ui.fin.read()
326 326 else:
327 327 message = '\n'.join(util.readfile(logfile).splitlines())
328 328 except IOError as inst:
329 329 raise error.Abort(_("can't read commit message '%s': %s") %
330 330 (logfile, inst.strerror))
331 331 return message
332 332
333 333 def mergeeditform(ctxorbool, baseformname):
334 334 """return appropriate editform name (referencing a committemplate)
335 335
336 336 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
337 337 a merge is being committed.
338 338
339 339 This returns baseformname with '.merge' appended if it is a merge,
340 340 otherwise '.normal' is appended.
341 341 """
342 342 if isinstance(ctxorbool, bool):
343 343 if ctxorbool:
344 344 return baseformname + ".merge"
345 345 elif 1 < len(ctxorbool.parents()):
346 346 return baseformname + ".merge"
347 347
348 348 return baseformname + ".normal"
349 349
350 350 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
351 351 editform='', **opts):
352 352 """get appropriate commit message editor according to '--edit' option
353 353
354 354 'finishdesc' is a function to be called with edited commit message
355 355 (= 'description' of the new changeset) just after editing, but
356 356 before checking emptiness. It should return the actual text to be
357 357 stored into history. This allows changing the description before
358 358 storing.
359 359
360 360 'extramsg' is an extra message to be shown in the editor instead of
361 361 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
362 362 are automatically added.
363 363
364 364 'editform' is a dot-separated list of names, to distinguish
365 365 the purpose of commit text editing.
366 366
367 367 'getcommiteditor' returns 'commitforceeditor' regardless of
368 368 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
369 369 they are specific to usage in MQ.
370 370 """
371 371 if edit or finishdesc or extramsg:
372 372 return lambda r, c, s: commitforceeditor(r, c, s,
373 373 finishdesc=finishdesc,
374 374 extramsg=extramsg,
375 375 editform=editform)
376 376 elif editform:
377 377 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
378 378 else:
379 379 return commiteditor
380 380
381 381 def loglimit(opts):
382 382 """get the log limit according to option -l/--limit"""
383 383 limit = opts.get('limit')
384 384 if limit:
385 385 try:
386 386 limit = int(limit)
387 387 except ValueError:
388 388 raise error.Abort(_('limit must be a positive integer'))
389 389 if limit <= 0:
390 390 raise error.Abort(_('limit must be positive'))
391 391 else:
392 392 limit = None
393 393 return limit
394 394
395 395 def makefilename(repo, pat, node, desc=None,
396 396 total=None, seqno=None, revwidth=None, pathname=None):
397 397 node_expander = {
398 398 'H': lambda: hex(node),
399 399 'R': lambda: str(repo.changelog.rev(node)),
400 400 'h': lambda: short(node),
401 401 'm': lambda: re.sub('[^\w]', '_', str(desc))
402 402 }
403 403 expander = {
404 404 '%': lambda: '%',
405 405 'b': lambda: os.path.basename(repo.root),
406 406 }
407 407
408 408 try:
409 409 if node:
410 410 expander.update(node_expander)
411 411 if node:
412 412 expander['r'] = (lambda:
413 413 str(repo.changelog.rev(node)).zfill(revwidth or 0))
414 414 if total is not None:
415 415 expander['N'] = lambda: str(total)
416 416 if seqno is not None:
417 417 expander['n'] = lambda: str(seqno)
418 418 if total is not None and seqno is not None:
419 419 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
420 420 if pathname is not None:
421 421 expander['s'] = lambda: os.path.basename(pathname)
422 422 expander['d'] = lambda: os.path.dirname(pathname) or '.'
423 423 expander['p'] = lambda: pathname
424 424
425 425 newname = []
426 426 patlen = len(pat)
427 427 i = 0
428 428 while i < patlen:
429 429 c = pat[i]
430 430 if c == '%':
431 431 i += 1
432 432 c = pat[i]
433 433 c = expander[c]()
434 434 newname.append(c)
435 435 i += 1
436 436 return ''.join(newname)
437 437 except KeyError as inst:
438 438 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
439 439 inst.args[0])
440 440
441 441 class _unclosablefile(object):
442 442 def __init__(self, fp):
443 443 self._fp = fp
444 444
445 445 def close(self):
446 446 pass
447 447
448 448 def __iter__(self):
449 449 return iter(self._fp)
450 450
451 451 def __getattr__(self, attr):
452 452 return getattr(self._fp, attr)
453 453
454 454 def makefileobj(repo, pat, node=None, desc=None, total=None,
455 455 seqno=None, revwidth=None, mode='wb', modemap=None,
456 456 pathname=None):
457 457
458 458 writable = mode not in ('r', 'rb')
459 459
460 460 if not pat or pat == '-':
461 461 if writable:
462 462 fp = repo.ui.fout
463 463 else:
464 464 fp = repo.ui.fin
465 if util.safehasattr(fp, 'fileno'):
466 return os.fdopen(os.dup(fp.fileno()), mode)
467 else:
468 # if this fp can't be duped properly, return
469 # a dummy object that can be closed
470 return _unclosablefile(fp)
465 return _unclosablefile(fp)
471 466 if util.safehasattr(pat, 'write') and writable:
472 467 return pat
473 468 if util.safehasattr(pat, 'read') and 'r' in mode:
474 469 return pat
475 470 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
476 471 if modemap is not None:
477 472 mode = modemap.get(fn, mode)
478 473 if mode == 'wb':
479 474 modemap[fn] = 'ab'
480 475 return open(fn, mode)
481 476
482 477 def openrevlog(repo, cmd, file_, opts):
483 478 """opens the changelog, manifest, a filelog or a given revlog"""
484 479 cl = opts['changelog']
485 480 mf = opts['manifest']
486 481 dir = opts['dir']
487 482 msg = None
488 483 if cl and mf:
489 484 msg = _('cannot specify --changelog and --manifest at the same time')
490 485 elif cl and dir:
491 486 msg = _('cannot specify --changelog and --dir at the same time')
492 487 elif cl or mf:
493 488 if file_:
494 489 msg = _('cannot specify filename with --changelog or --manifest')
495 490 elif not repo:
496 491 msg = _('cannot specify --changelog or --manifest or --dir '
497 492 'without a repository')
498 493 if msg:
499 494 raise error.Abort(msg)
500 495
501 496 r = None
502 497 if repo:
503 498 if cl:
504 499 r = repo.unfiltered().changelog
505 500 elif dir:
506 501 if 'treemanifest' not in repo.requirements:
507 502 raise error.Abort(_("--dir can only be used on repos with "
508 503 "treemanifest enabled"))
509 504 dirlog = repo.dirlog(file_)
510 505 if len(dirlog):
511 506 r = dirlog
512 507 elif mf:
513 508 r = repo.manifest
514 509 elif file_:
515 510 filelog = repo.file(file_)
516 511 if len(filelog):
517 512 r = filelog
518 513 if not r:
519 514 if not file_:
520 515 raise error.CommandError(cmd, _('invalid arguments'))
521 516 if not os.path.isfile(file_):
522 517 raise error.Abort(_("revlog '%s' not found") % file_)
523 518 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
524 519 file_[:-2] + ".i")
525 520 return r
526 521
527 522 def copy(ui, repo, pats, opts, rename=False):
528 523 # called with the repo lock held
529 524 #
530 525 # hgsep => pathname that uses "/" to separate directories
531 526 # ossep => pathname that uses os.sep to separate directories
532 527 cwd = repo.getcwd()
533 528 targets = {}
534 529 after = opts.get("after")
535 530 dryrun = opts.get("dry_run")
536 531 wctx = repo[None]
537 532
538 533 def walkpat(pat):
539 534 srcs = []
540 535 if after:
541 536 badstates = '?'
542 537 else:
543 538 badstates = '?r'
544 539 m = scmutil.match(repo[None], [pat], opts, globbed=True)
545 540 for abs in repo.walk(m):
546 541 state = repo.dirstate[abs]
547 542 rel = m.rel(abs)
548 543 exact = m.exact(abs)
549 544 if state in badstates:
550 545 if exact and state == '?':
551 546 ui.warn(_('%s: not copying - file is not managed\n') % rel)
552 547 if exact and state == 'r':
553 548 ui.warn(_('%s: not copying - file has been marked for'
554 549 ' remove\n') % rel)
555 550 continue
556 551 # abs: hgsep
557 552 # rel: ossep
558 553 srcs.append((abs, rel, exact))
559 554 return srcs
560 555
561 556 # abssrc: hgsep
562 557 # relsrc: ossep
563 558 # otarget: ossep
564 559 def copyfile(abssrc, relsrc, otarget, exact):
565 560 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
566 561 if '/' in abstarget:
567 562 # We cannot normalize abstarget itself, this would prevent
568 563 # case only renames, like a => A.
569 564 abspath, absname = abstarget.rsplit('/', 1)
570 565 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
571 566 reltarget = repo.pathto(abstarget, cwd)
572 567 target = repo.wjoin(abstarget)
573 568 src = repo.wjoin(abssrc)
574 569 state = repo.dirstate[abstarget]
575 570
576 571 scmutil.checkportable(ui, abstarget)
577 572
578 573 # check for collisions
579 574 prevsrc = targets.get(abstarget)
580 575 if prevsrc is not None:
581 576 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
582 577 (reltarget, repo.pathto(abssrc, cwd),
583 578 repo.pathto(prevsrc, cwd)))
584 579 return
585 580
586 581 # check for overwrites
587 582 exists = os.path.lexists(target)
588 583 samefile = False
589 584 if exists and abssrc != abstarget:
590 585 if (repo.dirstate.normalize(abssrc) ==
591 586 repo.dirstate.normalize(abstarget)):
592 587 if not rename:
593 588 ui.warn(_("%s: can't copy - same file\n") % reltarget)
594 589 return
595 590 exists = False
596 591 samefile = True
597 592
598 593 if not after and exists or after and state in 'mn':
599 594 if not opts['force']:
600 595 ui.warn(_('%s: not overwriting - file exists\n') %
601 596 reltarget)
602 597 return
603 598
604 599 if after:
605 600 if not exists:
606 601 if rename:
607 602 ui.warn(_('%s: not recording move - %s does not exist\n') %
608 603 (relsrc, reltarget))
609 604 else:
610 605 ui.warn(_('%s: not recording copy - %s does not exist\n') %
611 606 (relsrc, reltarget))
612 607 return
613 608 elif not dryrun:
614 609 try:
615 610 if exists:
616 611 os.unlink(target)
617 612 targetdir = os.path.dirname(target) or '.'
618 613 if not os.path.isdir(targetdir):
619 614 os.makedirs(targetdir)
620 615 if samefile:
621 616 tmp = target + "~hgrename"
622 617 os.rename(src, tmp)
623 618 os.rename(tmp, target)
624 619 else:
625 620 util.copyfile(src, target)
626 621 srcexists = True
627 622 except IOError as inst:
628 623 if inst.errno == errno.ENOENT:
629 624 ui.warn(_('%s: deleted in working directory\n') % relsrc)
630 625 srcexists = False
631 626 else:
632 627 ui.warn(_('%s: cannot copy - %s\n') %
633 628 (relsrc, inst.strerror))
634 629 return True # report a failure
635 630
636 631 if ui.verbose or not exact:
637 632 if rename:
638 633 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
639 634 else:
640 635 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
641 636
642 637 targets[abstarget] = abssrc
643 638
644 639 # fix up dirstate
645 640 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
646 641 dryrun=dryrun, cwd=cwd)
647 642 if rename and not dryrun:
648 643 if not after and srcexists and not samefile:
649 644 util.unlinkpath(repo.wjoin(abssrc))
650 645 wctx.forget([abssrc])
651 646
652 647 # pat: ossep
653 648 # dest ossep
654 649 # srcs: list of (hgsep, hgsep, ossep, bool)
655 650 # return: function that takes hgsep and returns ossep
656 651 def targetpathfn(pat, dest, srcs):
657 652 if os.path.isdir(pat):
658 653 abspfx = pathutil.canonpath(repo.root, cwd, pat)
659 654 abspfx = util.localpath(abspfx)
660 655 if destdirexists:
661 656 striplen = len(os.path.split(abspfx)[0])
662 657 else:
663 658 striplen = len(abspfx)
664 659 if striplen:
665 660 striplen += len(os.sep)
666 661 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
667 662 elif destdirexists:
668 663 res = lambda p: os.path.join(dest,
669 664 os.path.basename(util.localpath(p)))
670 665 else:
671 666 res = lambda p: dest
672 667 return res
673 668
674 669 # pat: ossep
675 670 # dest ossep
676 671 # srcs: list of (hgsep, hgsep, ossep, bool)
677 672 # return: function that takes hgsep and returns ossep
678 673 def targetpathafterfn(pat, dest, srcs):
679 674 if matchmod.patkind(pat):
680 675 # a mercurial pattern
681 676 res = lambda p: os.path.join(dest,
682 677 os.path.basename(util.localpath(p)))
683 678 else:
684 679 abspfx = pathutil.canonpath(repo.root, cwd, pat)
685 680 if len(abspfx) < len(srcs[0][0]):
686 681 # A directory. Either the target path contains the last
687 682 # component of the source path or it does not.
688 683 def evalpath(striplen):
689 684 score = 0
690 685 for s in srcs:
691 686 t = os.path.join(dest, util.localpath(s[0])[striplen:])
692 687 if os.path.lexists(t):
693 688 score += 1
694 689 return score
695 690
696 691 abspfx = util.localpath(abspfx)
697 692 striplen = len(abspfx)
698 693 if striplen:
699 694 striplen += len(os.sep)
700 695 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
701 696 score = evalpath(striplen)
702 697 striplen1 = len(os.path.split(abspfx)[0])
703 698 if striplen1:
704 699 striplen1 += len(os.sep)
705 700 if evalpath(striplen1) > score:
706 701 striplen = striplen1
707 702 res = lambda p: os.path.join(dest,
708 703 util.localpath(p)[striplen:])
709 704 else:
710 705 # a file
711 706 if destdirexists:
712 707 res = lambda p: os.path.join(dest,
713 708 os.path.basename(util.localpath(p)))
714 709 else:
715 710 res = lambda p: dest
716 711 return res
717 712
718 713 pats = scmutil.expandpats(pats)
719 714 if not pats:
720 715 raise error.Abort(_('no source or destination specified'))
721 716 if len(pats) == 1:
722 717 raise error.Abort(_('no destination specified'))
723 718 dest = pats.pop()
724 719 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
725 720 if not destdirexists:
726 721 if len(pats) > 1 or matchmod.patkind(pats[0]):
727 722 raise error.Abort(_('with multiple sources, destination must be an '
728 723 'existing directory'))
729 724 if util.endswithsep(dest):
730 725 raise error.Abort(_('destination %s is not a directory') % dest)
731 726
732 727 tfn = targetpathfn
733 728 if after:
734 729 tfn = targetpathafterfn
735 730 copylist = []
736 731 for pat in pats:
737 732 srcs = walkpat(pat)
738 733 if not srcs:
739 734 continue
740 735 copylist.append((tfn(pat, dest, srcs), srcs))
741 736 if not copylist:
742 737 raise error.Abort(_('no files to copy'))
743 738
744 739 errors = 0
745 740 for targetpath, srcs in copylist:
746 741 for abssrc, relsrc, exact in srcs:
747 742 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
748 743 errors += 1
749 744
750 745 if errors:
751 746 ui.warn(_('(consider using --after)\n'))
752 747
753 748 return errors != 0
754 749
755 750 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
756 751 runargs=None, appendpid=False):
757 752 '''Run a command as a service.'''
758 753
759 754 def writepid(pid):
760 755 if opts['pid_file']:
761 756 if appendpid:
762 757 mode = 'a'
763 758 else:
764 759 mode = 'w'
765 760 fp = open(opts['pid_file'], mode)
766 761 fp.write(str(pid) + '\n')
767 762 fp.close()
768 763
769 764 if opts['daemon'] and not opts['daemon_pipefds']:
770 765 # Signal child process startup with file removal
771 766 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
772 767 os.close(lockfd)
773 768 try:
774 769 if not runargs:
775 770 runargs = util.hgcmd() + sys.argv[1:]
776 771 runargs.append('--daemon-pipefds=%s' % lockpath)
777 772 # Don't pass --cwd to the child process, because we've already
778 773 # changed directory.
779 774 for i in xrange(1, len(runargs)):
780 775 if runargs[i].startswith('--cwd='):
781 776 del runargs[i]
782 777 break
783 778 elif runargs[i].startswith('--cwd'):
784 779 del runargs[i:i + 2]
785 780 break
786 781 def condfn():
787 782 return not os.path.exists(lockpath)
788 783 pid = util.rundetached(runargs, condfn)
789 784 if pid < 0:
790 785 raise error.Abort(_('child process failed to start'))
791 786 writepid(pid)
792 787 finally:
793 788 try:
794 789 os.unlink(lockpath)
795 790 except OSError as e:
796 791 if e.errno != errno.ENOENT:
797 792 raise
798 793 if parentfn:
799 794 return parentfn(pid)
800 795 else:
801 796 return
802 797
803 798 if initfn:
804 799 initfn()
805 800
806 801 if not opts['daemon']:
807 802 writepid(os.getpid())
808 803
809 804 if opts['daemon_pipefds']:
810 805 lockpath = opts['daemon_pipefds']
811 806 try:
812 807 os.setsid()
813 808 except AttributeError:
814 809 pass
815 810 os.unlink(lockpath)
816 811 util.hidewindow()
817 812 sys.stdout.flush()
818 813 sys.stderr.flush()
819 814
820 815 nullfd = os.open(os.devnull, os.O_RDWR)
821 816 logfilefd = nullfd
822 817 if logfile:
823 818 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
824 819 os.dup2(nullfd, 0)
825 820 os.dup2(logfilefd, 1)
826 821 os.dup2(logfilefd, 2)
827 822 if nullfd not in (0, 1, 2):
828 823 os.close(nullfd)
829 824 if logfile and logfilefd not in (0, 1, 2):
830 825 os.close(logfilefd)
831 826
832 827 if runfn:
833 828 return runfn()
834 829
835 830 ## facility to let extensions process additional data into an import patch
836 831 # list of identifiers to be executed in order
837 832 extrapreimport = [] # run before commit
838 833 extrapostimport = [] # run after commit
839 834 # mapping from identifier to actual import function
840 835 #
841 836 # 'preimport' are run before the commit is made and are provided the following
842 837 # arguments:
843 838 # - repo: the localrepository instance,
844 839 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
845 840 # - extra: the future extra dictionary of the changeset, please mutate it,
846 841 # - opts: the import options.
847 842 # XXX ideally, we would just pass a ctx ready to be computed, which would allow
848 843 # mutation of the in-memory commit and more. Feel free to rework the code to get
849 844 # there.
850 845 extrapreimportmap = {}
851 846 # 'postimport' are run after the commit is made and are provided the following
852 847 # argument:
853 848 # - ctx: the changectx created by import.
854 849 extrapostimportmap = {}
855 850
856 851 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
857 852 """Utility function used by commands.import to import a single patch
858 853
859 854 This function is explicitly defined here to help the evolve extension to
860 855 wrap this part of the import logic.
861 856
862 857 The API is currently a bit ugly because it is a simple code translation from
863 858 the import command. Feel free to make it better.
864 859
865 860 :hunk: a patch (as a binary string)
866 861 :parents: nodes that will be parents of the created commit
867 862 :opts: the full dict of options passed to the import command
868 863 :msgs: list to save commit message to.
869 864 (used in case we need to save it when failing)
870 865 :updatefunc: a function that updates a repo to a given node
871 866 updatefunc(<repo>, <node>)
872 867 """
873 868 # avoid cycle context -> subrepo -> cmdutil
874 869 import context
875 870 extractdata = patch.extract(ui, hunk)
876 871 tmpname = extractdata.get('filename')
877 872 message = extractdata.get('message')
878 873 user = extractdata.get('user')
879 874 date = extractdata.get('date')
880 875 branch = extractdata.get('branch')
881 876 nodeid = extractdata.get('nodeid')
882 877 p1 = extractdata.get('p1')
883 878 p2 = extractdata.get('p2')
884 879
885 880 update = not opts.get('bypass')
886 881 strip = opts["strip"]
887 882 prefix = opts["prefix"]
888 883 sim = float(opts.get('similarity') or 0)
889 884 if not tmpname:
890 885 return (None, None, False)
891 886 msg = _('applied to working directory')
892 887
893 888 rejects = False
894 889
895 890 try:
896 891 cmdline_message = logmessage(ui, opts)
897 892 if cmdline_message:
898 893 # pickup the cmdline msg
899 894 message = cmdline_message
900 895 elif message:
901 896 # pickup the patch msg
902 897 message = message.strip()
903 898 else:
904 899 # launch the editor
905 900 message = None
906 901 ui.debug('message:\n%s\n' % message)
907 902
908 903 if len(parents) == 1:
909 904 parents.append(repo[nullid])
910 905 if opts.get('exact'):
911 906 if not nodeid or not p1:
912 907 raise error.Abort(_('not a Mercurial patch'))
913 908 p1 = repo[p1]
914 909 p2 = repo[p2 or nullid]
915 910 elif p2:
916 911 try:
917 912 p1 = repo[p1]
918 913 p2 = repo[p2]
919 914 # Without any options, consider p2 only if the
920 915 # patch is being applied on top of the recorded
921 916 # first parent.
922 917 if p1 != parents[0]:
923 918 p1 = parents[0]
924 919 p2 = repo[nullid]
925 920 except error.RepoError:
926 921 p1, p2 = parents
927 922 if p2.node() == nullid:
928 923 ui.warn(_("warning: import the patch as a normal revision\n"
929 924 "(use --exact to import the patch as a merge)\n"))
930 925 else:
931 926 p1, p2 = parents
932 927
933 928 n = None
934 929 if update:
935 930 if p1 != parents[0]:
936 931 updatefunc(repo, p1.node())
937 932 if p2 != parents[1]:
938 933 repo.setparents(p1.node(), p2.node())
939 934
940 935 if opts.get('exact') or opts.get('import_branch'):
941 936 repo.dirstate.setbranch(branch or 'default')
942 937
943 938 partial = opts.get('partial', False)
944 939 files = set()
945 940 try:
946 941 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
947 942 files=files, eolmode=None, similarity=sim / 100.0)
948 943 except patch.PatchError as e:
949 944 if not partial:
950 945 raise error.Abort(str(e))
951 946 if partial:
952 947 rejects = True
953 948
954 949 files = list(files)
955 950 if opts.get('no_commit'):
956 951 if message:
957 952 msgs.append(message)
958 953 else:
959 954 if opts.get('exact') or p2:
960 955 # If you got here, you either used --force and know what
961 956 # you are doing or used --exact or a merge patch while
962 957 # being updated to its first parent.
963 958 m = None
964 959 else:
965 960 m = scmutil.matchfiles(repo, files or [])
966 961 editform = mergeeditform(repo[None], 'import.normal')
967 962 if opts.get('exact'):
968 963 editor = None
969 964 else:
970 965 editor = getcommiteditor(editform=editform, **opts)
971 966 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
972 967 extra = {}
973 968 for idfunc in extrapreimport:
974 969 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
975 970 try:
976 971 if partial:
977 972 repo.ui.setconfig('ui', 'allowemptycommit', True)
978 973 n = repo.commit(message, opts.get('user') or user,
979 974 opts.get('date') or date, match=m,
980 975 editor=editor, extra=extra)
981 976 for idfunc in extrapostimport:
982 977 extrapostimportmap[idfunc](repo[n])
983 978 finally:
984 979 repo.ui.restoreconfig(allowemptyback)
985 980 else:
986 981 if opts.get('exact') or opts.get('import_branch'):
987 982 branch = branch or 'default'
988 983 else:
989 984 branch = p1.branch()
990 985 store = patch.filestore()
991 986 try:
992 987 files = set()
993 988 try:
994 989 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
995 990 files, eolmode=None)
996 991 except patch.PatchError as e:
997 992 raise error.Abort(str(e))
998 993 if opts.get('exact'):
999 994 editor = None
1000 995 else:
1001 996 editor = getcommiteditor(editform='import.bypass')
1002 997 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1003 998 message,
1004 999 opts.get('user') or user,
1005 1000 opts.get('date') or date,
1006 1001 branch, files, store,
1007 1002 editor=editor)
1008 1003 n = memctx.commit()
1009 1004 finally:
1010 1005 store.close()
1011 1006 if opts.get('exact') and opts.get('no_commit'):
1012 1007 # --exact with --no-commit is still useful in that it does merge
1013 1008 # and branch bits
1014 1009 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1015 1010 elif opts.get('exact') and hex(n) != nodeid:
1016 1011 raise error.Abort(_('patch is damaged or loses information'))
1017 1012 if n:
1018 1013 # i18n: refers to a short changeset id
1019 1014 msg = _('created %s') % short(n)
1020 1015 return (msg, n, rejects)
1021 1016 finally:
1022 1017 os.unlink(tmpname)
1023 1018
1024 1019 # facility to let extensions include additional data in an exported patch
1025 1020 # list of identifiers to be executed in order
1026 1021 extraexport = []
1027 1022 # mapping from identifier to actual export function
1028 1023 # function has to return a string to be added to the header or None
1029 1024 # it is given two arguments (sequencenumber, changectx)
1030 1025 extraexportmap = {}
1031 1026
1032 1027 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1033 1028 opts=None, match=None):
1034 1029 '''export changesets as hg patches.'''
1035 1030
1036 1031 total = len(revs)
1037 1032 revwidth = max([len(str(rev)) for rev in revs])
1038 1033 filemode = {}
1039 1034
1040 1035 def single(rev, seqno, fp):
1041 1036 ctx = repo[rev]
1042 1037 node = ctx.node()
1043 1038 parents = [p.node() for p in ctx.parents() if p]
1044 1039 branch = ctx.branch()
1045 1040 if switch_parent:
1046 1041 parents.reverse()
1047 1042
1048 1043 if parents:
1049 1044 prev = parents[0]
1050 1045 else:
1051 1046 prev = nullid
1052 1047
1053 1048 shouldclose = False
1054 1049 if not fp and len(template) > 0:
1055 1050 desc_lines = ctx.description().rstrip().split('\n')
1056 1051 desc = desc_lines[0] #Commit always has a first line.
1057 1052 fp = makefileobj(repo, template, node, desc=desc, total=total,
1058 1053 seqno=seqno, revwidth=revwidth, mode='wb',
1059 1054 modemap=filemode)
1060 1055 shouldclose = True
1061 1056 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1062 1057 repo.ui.note("%s\n" % fp.name)
1063 1058
1064 1059 if not fp:
1065 1060 write = repo.ui.write
1066 1061 else:
1067 1062 def write(s, **kw):
1068 1063 fp.write(s)
1069 1064
1070 1065 write("# HG changeset patch\n")
1071 1066 write("# User %s\n" % ctx.user())
1072 1067 write("# Date %d %d\n" % ctx.date())
1073 1068 write("# %s\n" % util.datestr(ctx.date()))
1074 1069 if branch and branch != 'default':
1075 1070 write("# Branch %s\n" % branch)
1076 1071 write("# Node ID %s\n" % hex(node))
1077 1072 write("# Parent %s\n" % hex(prev))
1078 1073 if len(parents) > 1:
1079 1074 write("# Parent %s\n" % hex(parents[1]))
1080 1075
1081 1076 for headerid in extraexport:
1082 1077 header = extraexportmap[headerid](seqno, ctx)
1083 1078 if header is not None:
1084 1079 write('# %s\n' % header)
1085 1080 write(ctx.description().rstrip())
1086 1081 write("\n\n")
1087 1082
1088 1083 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1089 1084 write(chunk, label=label)
1090 1085
1091 1086 if shouldclose:
1092 1087 fp.close()
1093 1088
1094 1089 for seqno, rev in enumerate(revs):
1095 1090 single(rev, seqno + 1, fp)
1096 1091
1097 1092 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1098 1093 changes=None, stat=False, fp=None, prefix='',
1099 1094 root='', listsubrepos=False):
1100 1095 '''show diff or diffstat.'''
1101 1096 if fp is None:
1102 1097 write = ui.write
1103 1098 else:
1104 1099 def write(s, **kw):
1105 1100 fp.write(s)
1106 1101
1107 1102 if root:
1108 1103 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1109 1104 else:
1110 1105 relroot = ''
1111 1106 if relroot != '':
1112 1107 # XXX relative roots currently don't work if the root is within a
1113 1108 # subrepo
1114 1109 uirelroot = match.uipath(relroot)
1115 1110 relroot += '/'
1116 1111 for matchroot in match.files():
1117 1112 if not matchroot.startswith(relroot):
1118 1113 ui.warn(_('warning: %s not inside relative root %s\n') % (
1119 1114 match.uipath(matchroot), uirelroot))
1120 1115
1121 1116 if stat:
1122 1117 diffopts = diffopts.copy(context=0)
1123 1118 width = 80
1124 1119 if not ui.plain():
1125 1120 width = ui.termwidth()
1126 1121 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1127 1122 prefix=prefix, relroot=relroot)
1128 1123 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1129 1124 width=width,
1130 1125 git=diffopts.git):
1131 1126 write(chunk, label=label)
1132 1127 else:
1133 1128 for chunk, label in patch.diffui(repo, node1, node2, match,
1134 1129 changes, diffopts, prefix=prefix,
1135 1130 relroot=relroot):
1136 1131 write(chunk, label=label)
1137 1132
1138 1133 if listsubrepos:
1139 1134 ctx1 = repo[node1]
1140 1135 ctx2 = repo[node2]
1141 1136 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1142 1137 tempnode2 = node2
1143 1138 try:
1144 1139 if node2 is not None:
1145 1140 tempnode2 = ctx2.substate[subpath][1]
1146 1141 except KeyError:
1147 1142 # A subrepo that existed in node1 was deleted between node1 and
1148 1143 # node2 (inclusive). Thus, ctx2's substate won't contain that
1149 1144 # subpath. The best we can do is to ignore it.
1150 1145 tempnode2 = None
1151 1146 submatch = matchmod.narrowmatcher(subpath, match)
1152 1147 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1153 1148 stat=stat, fp=fp, prefix=prefix)
1154 1149
1155 1150 class changeset_printer(object):
1156 1151 '''show changeset information when templating is not requested.'''
1157 1152
1158 1153 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1159 1154 self.ui = ui
1160 1155 self.repo = repo
1161 1156 self.buffered = buffered
1162 1157 self.matchfn = matchfn
1163 1158 self.diffopts = diffopts
1164 1159 self.header = {}
1165 1160 self.hunk = {}
1166 1161 self.lastheader = None
1167 1162 self.footer = None
1168 1163
1169 1164 def flush(self, ctx):
1170 1165 rev = ctx.rev()
1171 1166 if rev in self.header:
1172 1167 h = self.header[rev]
1173 1168 if h != self.lastheader:
1174 1169 self.lastheader = h
1175 1170 self.ui.write(h)
1176 1171 del self.header[rev]
1177 1172 if rev in self.hunk:
1178 1173 self.ui.write(self.hunk[rev])
1179 1174 del self.hunk[rev]
1180 1175 return 1
1181 1176 return 0
1182 1177
1183 1178 def close(self):
1184 1179 if self.footer:
1185 1180 self.ui.write(self.footer)
1186 1181
1187 1182 def show(self, ctx, copies=None, matchfn=None, **props):
1188 1183 if self.buffered:
1189 1184 self.ui.pushbuffer(labeled=True)
1190 1185 self._show(ctx, copies, matchfn, props)
1191 1186 self.hunk[ctx.rev()] = self.ui.popbuffer()
1192 1187 else:
1193 1188 self._show(ctx, copies, matchfn, props)
1194 1189
1195 1190 def _show(self, ctx, copies, matchfn, props):
1196 1191 '''show a single changeset or file revision'''
1197 1192 changenode = ctx.node()
1198 1193 rev = ctx.rev()
1199 1194 if self.ui.debugflag:
1200 1195 hexfunc = hex
1201 1196 else:
1202 1197 hexfunc = short
1203 1198 # as of now, wctx.node() and wctx.rev() return None, but we want to
1204 1199 # show the same values as {node} and {rev} templatekw
1205 1200 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1206 1201
1207 1202 if self.ui.quiet:
1208 1203 self.ui.write("%d:%s\n" % revnode, label='log.node')
1209 1204 return
1210 1205
1211 1206 date = util.datestr(ctx.date())
1212 1207
1213 1208 # i18n: column positioning for "hg log"
1214 1209 self.ui.write(_("changeset: %d:%s\n") % revnode,
1215 1210 label='log.changeset changeset.%s' % ctx.phasestr())
1216 1211
1217 1212 # branches are shown first before any other names due to backwards
1218 1213 # compatibility
1219 1214 branch = ctx.branch()
1220 1215 # don't show the default branch name
1221 1216 if branch != 'default':
1222 1217 # i18n: column positioning for "hg log"
1223 1218 self.ui.write(_("branch: %s\n") % branch,
1224 1219 label='log.branch')
1225 1220
1226 1221 for name, ns in self.repo.names.iteritems():
1227 1222 # branches has special logic already handled above, so here we just
1228 1223 # skip it
1229 1224 if name == 'branches':
1230 1225 continue
1231 1226 # we will use the templatename as the color name since those two
1232 1227 # should be the same
1233 1228 for name in ns.names(self.repo, changenode):
1234 1229 self.ui.write(ns.logfmt % name,
1235 1230 label='log.%s' % ns.colorname)
1236 1231 if self.ui.debugflag:
1237 1232 # i18n: column positioning for "hg log"
1238 1233 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1239 1234 label='log.phase')
1240 1235 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1241 1236 label = 'log.parent changeset.%s' % pctx.phasestr()
1242 1237 # i18n: column positioning for "hg log"
1243 1238 self.ui.write(_("parent: %d:%s\n")
1244 1239 % (pctx.rev(), hexfunc(pctx.node())),
1245 1240 label=label)
1246 1241
1247 1242 if self.ui.debugflag and rev is not None:
1248 1243 mnode = ctx.manifestnode()
1249 1244 # i18n: column positioning for "hg log"
1250 1245 self.ui.write(_("manifest: %d:%s\n") %
1251 1246 (self.repo.manifest.rev(mnode), hex(mnode)),
1252 1247 label='ui.debug log.manifest')
1253 1248 # i18n: column positioning for "hg log"
1254 1249 self.ui.write(_("user: %s\n") % ctx.user(),
1255 1250 label='log.user')
1256 1251 # i18n: column positioning for "hg log"
1257 1252 self.ui.write(_("date: %s\n") % date,
1258 1253 label='log.date')
1259 1254
1260 1255 if self.ui.debugflag:
1261 1256 files = ctx.p1().status(ctx)[:3]
1262 1257 for key, value in zip([# i18n: column positioning for "hg log"
1263 1258 _("files:"),
1264 1259 # i18n: column positioning for "hg log"
1265 1260 _("files+:"),
1266 1261 # i18n: column positioning for "hg log"
1267 1262 _("files-:")], files):
1268 1263 if value:
1269 1264 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1270 1265 label='ui.debug log.files')
1271 1266 elif ctx.files() and self.ui.verbose:
1272 1267 # i18n: column positioning for "hg log"
1273 1268 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1274 1269 label='ui.note log.files')
1275 1270 if copies and self.ui.verbose:
1276 1271 copies = ['%s (%s)' % c for c in copies]
1277 1272 # i18n: column positioning for "hg log"
1278 1273 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1279 1274 label='ui.note log.copies')
1280 1275
1281 1276 extra = ctx.extra()
1282 1277 if extra and self.ui.debugflag:
1283 1278 for key, value in sorted(extra.items()):
1284 1279 # i18n: column positioning for "hg log"
1285 1280 self.ui.write(_("extra: %s=%s\n")
1286 1281 % (key, value.encode('string_escape')),
1287 1282 label='ui.debug log.extra')
1288 1283
1289 1284 description = ctx.description().strip()
1290 1285 if description:
1291 1286 if self.ui.verbose:
1292 1287 self.ui.write(_("description:\n"),
1293 1288 label='ui.note log.description')
1294 1289 self.ui.write(description,
1295 1290 label='ui.note log.description')
1296 1291 self.ui.write("\n\n")
1297 1292 else:
1298 1293 # i18n: column positioning for "hg log"
1299 1294 self.ui.write(_("summary: %s\n") %
1300 1295 description.splitlines()[0],
1301 1296 label='log.summary')
1302 1297 self.ui.write("\n")
1303 1298
1304 1299 self.showpatch(ctx, matchfn)
1305 1300
1306 1301 def showpatch(self, ctx, matchfn):
1307 1302 if not matchfn:
1308 1303 matchfn = self.matchfn
1309 1304 if matchfn:
1310 1305 stat = self.diffopts.get('stat')
1311 1306 diff = self.diffopts.get('patch')
1312 1307 diffopts = patch.diffallopts(self.ui, self.diffopts)
1313 1308 node = ctx.node()
1314 1309 prev = ctx.p1()
1315 1310 if stat:
1316 1311 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1317 1312 match=matchfn, stat=True)
1318 1313 if diff:
1319 1314 if stat:
1320 1315 self.ui.write("\n")
1321 1316 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1322 1317 match=matchfn, stat=False)
1323 1318 self.ui.write("\n")
1324 1319
1325 1320 class jsonchangeset(changeset_printer):
1326 1321 '''format changeset information.'''
1327 1322
1328 1323 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1329 1324 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1330 1325 self.cache = {}
1331 1326 self._first = True
1332 1327
1333 1328 def close(self):
1334 1329 if not self._first:
1335 1330 self.ui.write("\n]\n")
1336 1331 else:
1337 1332 self.ui.write("[]\n")
1338 1333
1339 1334 def _show(self, ctx, copies, matchfn, props):
1340 1335 '''show a single changeset or file revision'''
1341 1336 rev = ctx.rev()
1342 1337 if rev is None:
1343 1338 jrev = jnode = 'null'
1344 1339 else:
1345 1340 jrev = str(rev)
1346 1341 jnode = '"%s"' % hex(ctx.node())
1347 1342 j = encoding.jsonescape
1348 1343
1349 1344 if self._first:
1350 1345 self.ui.write("[\n {")
1351 1346 self._first = False
1352 1347 else:
1353 1348 self.ui.write(",\n {")
1354 1349
1355 1350 if self.ui.quiet:
1356 1351 self.ui.write('\n "rev": %s' % jrev)
1357 1352 self.ui.write(',\n "node": %s' % jnode)
1358 1353 self.ui.write('\n }')
1359 1354 return
1360 1355
1361 1356 self.ui.write('\n "rev": %s' % jrev)
1362 1357 self.ui.write(',\n "node": %s' % jnode)
1363 1358 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1364 1359 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1365 1360 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1366 1361 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1367 1362 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1368 1363
1369 1364 self.ui.write(',\n "bookmarks": [%s]' %
1370 1365 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1371 1366 self.ui.write(',\n "tags": [%s]' %
1372 1367 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1373 1368 self.ui.write(',\n "parents": [%s]' %
1374 1369 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1375 1370
1376 1371 if self.ui.debugflag:
1377 1372 if rev is None:
1378 1373 jmanifestnode = 'null'
1379 1374 else:
1380 1375 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1381 1376 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1382 1377
1383 1378 self.ui.write(',\n "extra": {%s}' %
1384 1379 ", ".join('"%s": "%s"' % (j(k), j(v))
1385 1380 for k, v in ctx.extra().items()))
1386 1381
1387 1382 files = ctx.p1().status(ctx)
1388 1383 self.ui.write(',\n "modified": [%s]' %
1389 1384 ", ".join('"%s"' % j(f) for f in files[0]))
1390 1385 self.ui.write(',\n "added": [%s]' %
1391 1386 ", ".join('"%s"' % j(f) for f in files[1]))
1392 1387 self.ui.write(',\n "removed": [%s]' %
1393 1388 ", ".join('"%s"' % j(f) for f in files[2]))
1394 1389
1395 1390 elif self.ui.verbose:
1396 1391 self.ui.write(',\n "files": [%s]' %
1397 1392 ", ".join('"%s"' % j(f) for f in ctx.files()))
1398 1393
1399 1394 if copies:
1400 1395 self.ui.write(',\n "copies": {%s}' %
1401 1396 ", ".join('"%s": "%s"' % (j(k), j(v))
1402 1397 for k, v in copies))
1403 1398
1404 1399 matchfn = self.matchfn
1405 1400 if matchfn:
1406 1401 stat = self.diffopts.get('stat')
1407 1402 diff = self.diffopts.get('patch')
1408 1403 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1409 1404 node, prev = ctx.node(), ctx.p1().node()
1410 1405 if stat:
1411 1406 self.ui.pushbuffer()
1412 1407 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1413 1408 match=matchfn, stat=True)
1414 1409 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1415 1410 if diff:
1416 1411 self.ui.pushbuffer()
1417 1412 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1418 1413 match=matchfn, stat=False)
1419 1414 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1420 1415
1421 1416 self.ui.write("\n }")
1422 1417
1423 1418 class changeset_templater(changeset_printer):
1424 1419 '''format changeset information.'''
1425 1420
1426 1421 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1427 1422 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1428 1423 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1429 1424 defaulttempl = {
1430 1425 'parent': '{rev}:{node|formatnode} ',
1431 1426 'manifest': '{rev}:{node|formatnode}',
1432 1427 'file_copy': '{name} ({source})',
1433 1428 'extra': '{key}={value|stringescape}'
1434 1429 }
1435 1430 # filecopy is preserved for compatibility reasons
1436 1431 defaulttempl['filecopy'] = defaulttempl['file_copy']
1437 1432 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1438 1433 cache=defaulttempl)
1439 1434 if tmpl:
1440 1435 self.t.cache['changeset'] = tmpl
1441 1436
1442 1437 self.cache = {}
1443 1438
1444 1439 # find correct templates for current mode
1445 1440 tmplmodes = [
1446 1441 (True, None),
1447 1442 (self.ui.verbose, 'verbose'),
1448 1443 (self.ui.quiet, 'quiet'),
1449 1444 (self.ui.debugflag, 'debug'),
1450 1445 ]
1451 1446
1452 1447 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1453 1448 'docheader': '', 'docfooter': ''}
1454 1449 for mode, postfix in tmplmodes:
1455 1450 for t in self._parts:
1456 1451 cur = t
1457 1452 if postfix:
1458 1453 cur += "_" + postfix
1459 1454 if mode and cur in self.t:
1460 1455 self._parts[t] = cur
1461 1456
1462 1457 if self._parts['docheader']:
1463 1458 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1464 1459
1465 1460 def close(self):
1466 1461 if self._parts['docfooter']:
1467 1462 if not self.footer:
1468 1463 self.footer = ""
1469 1464 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1470 1465 return super(changeset_templater, self).close()
1471 1466
1472 1467 def _show(self, ctx, copies, matchfn, props):
1473 1468 '''show a single changeset or file revision'''
1474 1469 props = props.copy()
1475 1470 props.update(templatekw.keywords)
1476 1471 props['templ'] = self.t
1477 1472 props['ctx'] = ctx
1478 1473 props['repo'] = self.repo
1479 1474 props['revcache'] = {'copies': copies}
1480 1475 props['cache'] = self.cache
1481 1476
1482 1477 try:
1483 1478 # write header
1484 1479 if self._parts['header']:
1485 1480 h = templater.stringify(self.t(self._parts['header'], **props))
1486 1481 if self.buffered:
1487 1482 self.header[ctx.rev()] = h
1488 1483 else:
1489 1484 if self.lastheader != h:
1490 1485 self.lastheader = h
1491 1486 self.ui.write(h)
1492 1487
1493 1488 # write changeset metadata, then patch if requested
1494 1489 key = self._parts['changeset']
1495 1490 self.ui.write(templater.stringify(self.t(key, **props)))
1496 1491 self.showpatch(ctx, matchfn)
1497 1492
1498 1493 if self._parts['footer']:
1499 1494 if not self.footer:
1500 1495 self.footer = templater.stringify(
1501 1496 self.t(self._parts['footer'], **props))
1502 1497 except KeyError as inst:
1503 1498 msg = _("%s: no key named '%s'")
1504 1499 raise error.Abort(msg % (self.t.mapfile, inst.args[0]))
1505 1500 except SyntaxError as inst:
1506 1501 raise error.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1507 1502
1508 1503 def gettemplate(ui, tmpl, style):
1509 1504 """
1510 1505 Find the template matching the given template spec or style.
1511 1506 """
1512 1507
1513 1508 # ui settings
1514 1509 if not tmpl and not style: # templates are stronger than style
1515 1510 tmpl = ui.config('ui', 'logtemplate')
1516 1511 if tmpl:
1517 1512 try:
1518 1513 tmpl = templater.unquotestring(tmpl)
1519 1514 except SyntaxError:
1520 1515 pass
1521 1516 return tmpl, None
1522 1517 else:
1523 1518 style = util.expandpath(ui.config('ui', 'style', ''))
1524 1519
1525 1520 if not tmpl and style:
1526 1521 mapfile = style
1527 1522 if not os.path.split(mapfile)[0]:
1528 1523 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1529 1524 or templater.templatepath(mapfile))
1530 1525 if mapname:
1531 1526 mapfile = mapname
1532 1527 return None, mapfile
1533 1528
1534 1529 if not tmpl:
1535 1530 return None, None
1536 1531
1537 1532 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1538 1533
1539 1534 def show_changeset(ui, repo, opts, buffered=False):
1540 1535 """show one changeset using template or regular display.
1541 1536
1542 1537 Display format will be the first non-empty hit of:
1543 1538 1. option 'template'
1544 1539 2. option 'style'
1545 1540 3. [ui] setting 'logtemplate'
1546 1541 4. [ui] setting 'style'
1547 1542 If all of these values are either unset or the empty string,
1548 1543 regular display via changeset_printer() is done.
1549 1544 """
1550 1545 # options
1551 1546 matchfn = None
1552 1547 if opts.get('patch') or opts.get('stat'):
1553 1548 matchfn = scmutil.matchall(repo)
1554 1549
1555 1550 if opts.get('template') == 'json':
1556 1551 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1557 1552
1558 1553 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1559 1554
1560 1555 if not tmpl and not mapfile:
1561 1556 return changeset_printer(ui, repo, matchfn, opts, buffered)
1562 1557
1563 1558 try:
1564 1559 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1565 1560 buffered)
1566 1561 except SyntaxError as inst:
1567 1562 raise error.Abort(inst.args[0])
1568 1563 return t
1569 1564
1570 1565 def showmarker(ui, marker):
1571 1566 """utility function to display obsolescence marker in a readable way
1572 1567
1573 1568 To be used by debug function."""
1574 1569 ui.write(hex(marker.precnode()))
1575 1570 for repl in marker.succnodes():
1576 1571 ui.write(' ')
1577 1572 ui.write(hex(repl))
1578 1573 ui.write(' %X ' % marker.flags())
1579 1574 parents = marker.parentnodes()
1580 1575 if parents is not None:
1581 1576 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1582 1577 ui.write('(%s) ' % util.datestr(marker.date()))
1583 1578 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1584 1579 sorted(marker.metadata().items())
1585 1580 if t[0] != 'date')))
1586 1581 ui.write('\n')
1587 1582
1588 1583 def finddate(ui, repo, date):
1589 1584 """Find the tipmost changeset that matches the given date spec"""
1590 1585
1591 1586 df = util.matchdate(date)
1592 1587 m = scmutil.matchall(repo)
1593 1588 results = {}
1594 1589
1595 1590 def prep(ctx, fns):
1596 1591 d = ctx.date()
1597 1592 if df(d[0]):
1598 1593 results[ctx.rev()] = d
1599 1594
1600 1595 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1601 1596 rev = ctx.rev()
1602 1597 if rev in results:
1603 1598 ui.status(_("found revision %s from %s\n") %
1604 1599 (rev, util.datestr(results[rev])))
1605 1600 return str(rev)
1606 1601
1607 1602 raise error.Abort(_("revision matching date not found"))
1608 1603
1609 1604 def increasingwindows(windowsize=8, sizelimit=512):
1610 1605 while True:
1611 1606 yield windowsize
1612 1607 if windowsize < sizelimit:
1613 1608 windowsize *= 2
1614 1609
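# Quick editorial illustration of the window sizes yielded above: they
# double until the size limit is reached and then stay there.
def _increasingwindows_example():
    import itertools
    sizes = list(itertools.islice(increasingwindows(), 8))
    # sizes == [8, 16, 32, 64, 128, 256, 512, 512]
    return sizes
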
1615 1610 class FileWalkError(Exception):
1616 1611 pass
1617 1612
1618 1613 def walkfilerevs(repo, match, follow, revs, fncache):
1619 1614 '''Walks the file history for the matched files.
1620 1615
1621 1616 Returns the changeset revs that are involved in the file history.
1622 1617
1623 1618 Throws FileWalkError if the file history can't be walked using
1624 1619 filelogs alone.
1625 1620 '''
1626 1621 wanted = set()
1627 1622 copies = []
1628 1623 minrev, maxrev = min(revs), max(revs)
1629 1624 def filerevgen(filelog, last):
1630 1625 """
1631 1626 Only files, no patterns. Check the history of each file.
1632 1627
1633 1628 Examines filelog entries within the minrev/maxrev linkrev range
1634 1629 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1635 1630 tuples in backwards order
1636 1631 """
1637 1632 cl_count = len(repo)
1638 1633 revs = []
1639 1634 for j in xrange(0, last + 1):
1640 1635 linkrev = filelog.linkrev(j)
1641 1636 if linkrev < minrev:
1642 1637 continue
1643 1638 # only yield rev for which we have the changelog, it can
1644 1639 # happen while doing "hg log" during a pull or commit
1645 1640 if linkrev >= cl_count:
1646 1641 break
1647 1642
1648 1643 parentlinkrevs = []
1649 1644 for p in filelog.parentrevs(j):
1650 1645 if p != nullrev:
1651 1646 parentlinkrevs.append(filelog.linkrev(p))
1652 1647 n = filelog.node(j)
1653 1648 revs.append((linkrev, parentlinkrevs,
1654 1649 follow and filelog.renamed(n)))
1655 1650
1656 1651 return reversed(revs)
1657 1652 def iterfiles():
1658 1653 pctx = repo['.']
1659 1654 for filename in match.files():
1660 1655 if follow:
1661 1656 if filename not in pctx:
1662 1657 raise error.Abort(_('cannot follow file not in parent '
1663 1658 'revision: "%s"') % filename)
1664 1659 yield filename, pctx[filename].filenode()
1665 1660 else:
1666 1661 yield filename, None
1667 1662 for filename_node in copies:
1668 1663 yield filename_node
1669 1664
1670 1665 for file_, node in iterfiles():
1671 1666 filelog = repo.file(file_)
1672 1667 if not len(filelog):
1673 1668 if node is None:
1674 1669 # A zero count may be a directory or deleted file, so
1675 1670 # try to find matching entries on the slow path.
1676 1671 if follow:
1677 1672 raise error.Abort(
1678 1673 _('cannot follow nonexistent file: "%s"') % file_)
1679 1674 raise FileWalkError("Cannot walk via filelog")
1680 1675 else:
1681 1676 continue
1682 1677
1683 1678 if node is None:
1684 1679 last = len(filelog) - 1
1685 1680 else:
1686 1681 last = filelog.rev(node)
1687 1682
1688 1683 # keep track of all ancestors of the file
1689 1684 ancestors = set([filelog.linkrev(last)])
1690 1685
1691 1686 # iterate from latest to oldest revision
1692 1687 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1693 1688 if not follow:
1694 1689 if rev > maxrev:
1695 1690 continue
1696 1691 else:
1697 1692 # Note that last might not be the first interesting
1698 1693 # rev to us:
1699 1694 # if the file has been changed after maxrev, we'll
1700 1695 # have linkrev(last) > maxrev, and we still need
1701 1696 # to explore the file graph
1702 1697 if rev not in ancestors:
1703 1698 continue
1704 1699 # XXX insert 1327 fix here
1705 1700 if flparentlinkrevs:
1706 1701 ancestors.update(flparentlinkrevs)
1707 1702
1708 1703 fncache.setdefault(rev, []).append(file_)
1709 1704 wanted.add(rev)
1710 1705 if copied:
1711 1706 copies.append(copied)
1712 1707
1713 1708 return wanted
1714 1709
1715 1710 class _followfilter(object):
1716 1711 def __init__(self, repo, onlyfirst=False):
1717 1712 self.repo = repo
1718 1713 self.startrev = nullrev
1719 1714 self.roots = set()
1720 1715 self.onlyfirst = onlyfirst
1721 1716
1722 1717 def match(self, rev):
1723 1718 def realparents(rev):
1724 1719 if self.onlyfirst:
1725 1720 return self.repo.changelog.parentrevs(rev)[0:1]
1726 1721 else:
1727 1722 return filter(lambda x: x != nullrev,
1728 1723 self.repo.changelog.parentrevs(rev))
1729 1724
1730 1725 if self.startrev == nullrev:
1731 1726 self.startrev = rev
1732 1727 return True
1733 1728
1734 1729 if rev > self.startrev:
1735 1730 # forward: all descendants
1736 1731 if not self.roots:
1737 1732 self.roots.add(self.startrev)
1738 1733 for parent in realparents(rev):
1739 1734 if parent in self.roots:
1740 1735 self.roots.add(rev)
1741 1736 return True
1742 1737 else:
1743 1738 # backwards: all parents
1744 1739 if not self.roots:
1745 1740 self.roots.update(realparents(self.startrev))
1746 1741 if rev in self.roots:
1747 1742 self.roots.remove(rev)
1748 1743 self.roots.update(realparents(rev))
1749 1744 return True
1750 1745
1751 1746 return False
1752 1747
1753 1748 def walkchangerevs(repo, match, opts, prepare):
1754 1749 '''Iterate over files and the revs in which they changed.
1755 1750
1756 1751 Callers most commonly need to iterate backwards over the history
1757 1752 in which they are interested. Doing so has awful (quadratic-looking)
1758 1753 performance, so we use iterators in a "windowed" way.
1759 1754
1760 1755 We walk a window of revisions in the desired order. Within the
1761 1756 window, we first walk forwards to gather data, then in the desired
1762 1757 order (usually backwards) to display it.
1763 1758
1764 1759 This function returns an iterator yielding contexts. Before
1765 1760 yielding each context, the iterator will first call the prepare
1766 1761 function on each context in the window in forward order.'''
1767 1762
1768 1763 follow = opts.get('follow') or opts.get('follow_first')
1769 1764 revs = _logrevs(repo, opts)
1770 1765 if not revs:
1771 1766 return []
1772 1767 wanted = set()
1773 1768 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1774 1769 opts.get('removed'))
1775 1770 fncache = {}
1776 1771 change = repo.changectx
1777 1772
1778 1773 # First step is to fill wanted, the set of revisions that we want to yield.
1779 1774 # When it does not induce extra cost, we also fill fncache for revisions in
1780 1775 # wanted: a cache of filenames that were changed (ctx.files()) and that
1781 1776 # match the file filtering conditions.
1782 1777
1783 1778 if match.always():
1784 1779 # No files, no patterns. Display all revs.
1785 1780 wanted = revs
1786 1781 elif not slowpath:
1787 1782 # We only have to read through the filelog to find wanted revisions
1788 1783
1789 1784 try:
1790 1785 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1791 1786 except FileWalkError:
1792 1787 slowpath = True
1793 1788
1794 1789 # We decided to fall back to the slowpath because at least one
1795 1790 # of the paths was not a file. Check to see if at least one of them
1796 1791 # existed in history, otherwise simply return
1797 1792 for path in match.files():
1798 1793 if path == '.' or path in repo.store:
1799 1794 break
1800 1795 else:
1801 1796 return []
1802 1797
1803 1798 if slowpath:
1804 1799 # We have to read the changelog to match filenames against
1805 1800 # changed files
1806 1801
1807 1802 if follow:
1808 1803 raise error.Abort(_('can only follow copies/renames for explicit '
1809 1804 'filenames'))
1810 1805
1811 1806 # The slow path checks files modified in every changeset.
1812 1807 # This is really slow on large repos, so compute the set lazily.
1813 1808 class lazywantedset(object):
1814 1809 def __init__(self):
1815 1810 self.set = set()
1816 1811 self.revs = set(revs)
1817 1812
1818 1813 # No need to worry about locality here because it will be accessed
1819 1814 # in the same order as the increasing window below.
1820 1815 def __contains__(self, value):
1821 1816 if value in self.set:
1822 1817 return True
1823 1818 elif value not in self.revs:
1824 1819 return False
1825 1820 else:
1826 1821 self.revs.discard(value)
1827 1822 ctx = change(value)
1828 1823 matches = filter(match, ctx.files())
1829 1824 if matches:
1830 1825 fncache[value] = matches
1831 1826 self.set.add(value)
1832 1827 return True
1833 1828 return False
1834 1829
1835 1830 def discard(self, value):
1836 1831 self.revs.discard(value)
1837 1832 self.set.discard(value)
1838 1833
1839 1834 wanted = lazywantedset()
1840 1835
1841 1836 # it might be worthwhile to do this in the iterator if the rev range
1842 1837 # is descending and the prune args are all within that range
1843 1838 for rev in opts.get('prune', ()):
1844 1839 rev = repo[rev].rev()
1845 1840 ff = _followfilter(repo)
1846 1841 stop = min(revs[0], revs[-1])
1847 1842 for x in xrange(rev, stop - 1, -1):
1848 1843 if ff.match(x):
1849 1844 wanted = wanted - [x]
1850 1845
1851 1846 # Now that wanted is correctly initialized, we can iterate over the
1852 1847 # revision range, yielding only revisions in wanted.
1853 1848 def iterate():
1854 1849 if follow and match.always():
1855 1850 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1856 1851 def want(rev):
1857 1852 return ff.match(rev) and rev in wanted
1858 1853 else:
1859 1854 def want(rev):
1860 1855 return rev in wanted
1861 1856
1862 1857 it = iter(revs)
1863 1858 stopiteration = False
1864 1859 for windowsize in increasingwindows():
1865 1860 nrevs = []
1866 1861 for i in xrange(windowsize):
1867 1862 rev = next(it, None)
1868 1863 if rev is None:
1869 1864 stopiteration = True
1870 1865 break
1871 1866 elif want(rev):
1872 1867 nrevs.append(rev)
1873 1868 for rev in sorted(nrevs):
1874 1869 fns = fncache.get(rev)
1875 1870 ctx = change(rev)
1876 1871 if not fns:
1877 1872 def fns_generator():
1878 1873 for f in ctx.files():
1879 1874 if match(f):
1880 1875 yield f
1881 1876 fns = fns_generator()
1882 1877 prepare(ctx, fns)
1883 1878 for rev in nrevs:
1884 1879 yield change(rev)
1885 1880
1886 1881 if stopiteration:
1887 1882 break
1888 1883
1889 1884 return iterate()
1890 1885
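# Usage sketch for walkchangerevs(), modelled on finddate() above (the
# helper name and the 'touched' accumulator are hypothetical): record, for
# every yielded revision, which matched files it changed. The prepare
# callback runs on each context in forward order within a window before the
# contexts themselves are yielded.
def _walkchangerevs_example(repo, match):
    touched = {}
    def prep(ctx, fns):
        touched[ctx.rev()] = list(fns)
    for ctx in walkchangerevs(repo, match, {'rev': None}, prep):
        pass  # prep() has already run for this context
    return touched
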
1891 1886 def _makefollowlogfilematcher(repo, files, followfirst):
1892 1887 # When displaying a revision with --patch --follow FILE, we have
1893 1888 # to know which file of the revision must be diffed. With
1894 1889 # --follow, we want the names of the ancestors of FILE in the
1895 1890 # revision, stored in "fcache". "fcache" is populated by
1896 1891 # reproducing the graph traversal already done by --follow revset
1897 1892 # and relating linkrevs to file names (which is not "correct" but
1898 1893 # good enough).
1899 1894 fcache = {}
1900 1895 fcacheready = [False]
1901 1896 pctx = repo['.']
1902 1897
1903 1898 def populate():
1904 1899 for fn in files:
1905 1900 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1906 1901 for c in i:
1907 1902 fcache.setdefault(c.linkrev(), set()).add(c.path())
1908 1903
1909 1904 def filematcher(rev):
1910 1905 if not fcacheready[0]:
1911 1906 # Lazy initialization
1912 1907 fcacheready[0] = True
1913 1908 populate()
1914 1909 return scmutil.matchfiles(repo, fcache.get(rev, []))
1915 1910
1916 1911 return filematcher
1917 1912
1918 1913 def _makenofollowlogfilematcher(repo, pats, opts):
1919 1914 '''hook for extensions to override the filematcher for non-follow cases'''
1920 1915 return None
1921 1916
1922 1917 def _makelogrevset(repo, pats, opts, revs):
1923 1918 """Return (expr, filematcher) where expr is a revset string built
1924 1919 from log options and file patterns or None. If --stat or --patch
1925 1920 are not passed filematcher is None. Otherwise it is a callable
1926 1921 taking a revision number and returning a match object filtering
1927 1922 the files to be detailed when displaying the revision.
1928 1923 """
1929 1924 opt2revset = {
1930 1925 'no_merges': ('not merge()', None),
1931 1926 'only_merges': ('merge()', None),
1932 1927 '_ancestors': ('ancestors(%(val)s)', None),
1933 1928 '_fancestors': ('_firstancestors(%(val)s)', None),
1934 1929 '_descendants': ('descendants(%(val)s)', None),
1935 1930 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1936 1931 '_matchfiles': ('_matchfiles(%(val)s)', None),
1937 1932 'date': ('date(%(val)r)', None),
1938 1933 'branch': ('branch(%(val)r)', ' or '),
1939 1934 '_patslog': ('filelog(%(val)r)', ' or '),
1940 1935 '_patsfollow': ('follow(%(val)r)', ' or '),
1941 1936 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1942 1937 'keyword': ('keyword(%(val)r)', ' or '),
1943 1938 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1944 1939 'user': ('user(%(val)r)', ' or '),
1945 1940 }
1946 1941
1947 1942 opts = dict(opts)
1948 1943 # follow or not follow?
1949 1944 follow = opts.get('follow') or opts.get('follow_first')
1950 1945 if opts.get('follow_first'):
1951 1946 followfirst = 1
1952 1947 else:
1953 1948 followfirst = 0
1954 1949 # --follow with FILE behavior depends on revs...
1955 1950 it = iter(revs)
1956 1951 startrev = it.next()
1957 1952 followdescendants = startrev < next(it, startrev)
1958 1953
1959 1954 # branch and only_branch are really aliases and must be handled at
1960 1955 # the same time
1961 1956 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1962 1957 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1963 1958 # pats/include/exclude are passed to match.match() directly in
1964 1959 # _matchfiles() revset but walkchangerevs() builds its matcher with
1965 1960 # scmutil.match(). The difference is input pats are globbed on
1966 1961 # platforms without shell expansion (windows).
1967 1962 wctx = repo[None]
1968 1963 match, pats = scmutil.matchandpats(wctx, pats, opts)
1969 1964 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1970 1965 opts.get('removed'))
1971 1966 if not slowpath:
1972 1967 for f in match.files():
1973 1968 if follow and f not in wctx:
1974 1969 # If the file exists, it may be a directory, so let it
1975 1970 # take the slow path.
1976 1971 if os.path.exists(repo.wjoin(f)):
1977 1972 slowpath = True
1978 1973 continue
1979 1974 else:
1980 1975 raise error.Abort(_('cannot follow file not in parent '
1981 1976 'revision: "%s"') % f)
1982 1977 filelog = repo.file(f)
1983 1978 if not filelog:
1984 1979 # A zero count may be a directory or deleted file, so
1985 1980 # try to find matching entries on the slow path.
1986 1981 if follow:
1987 1982 raise error.Abort(
1988 1983 _('cannot follow nonexistent file: "%s"') % f)
1989 1984 slowpath = True
1990 1985
1991 1986 # We decided to fall back to the slowpath because at least one
1992 1987 # of the paths was not a file. Check to see if at least one of them
1993 1988 # existed in history - in that case, we'll continue down the
1994 1989 # slowpath; otherwise, we can turn off the slowpath
1995 1990 if slowpath:
1996 1991 for path in match.files():
1997 1992 if path == '.' or path in repo.store:
1998 1993 break
1999 1994 else:
2000 1995 slowpath = False
2001 1996
2002 1997 fpats = ('_patsfollow', '_patsfollowfirst')
2003 1998 fnopats = (('_ancestors', '_fancestors'),
2004 1999 ('_descendants', '_fdescendants'))
2005 2000 if slowpath:
2006 2001 # See walkchangerevs() slow path.
2007 2002 #
2008 2003 # pats/include/exclude cannot be represented as separate
2009 2004 # revset expressions as their filtering logic applies at file
2010 2005 # level. For instance "-I a -X b" matches a revision touching
2011 2006 # "a" and "b" while "file(a) and not file(b)" does
2012 2007 # not. Besides, filesets are evaluated against the working
2013 2008 # directory.
2014 2009 matchargs = ['r:', 'd:relpath']
2015 2010 for p in pats:
2016 2011 matchargs.append('p:' + p)
2017 2012 for p in opts.get('include', []):
2018 2013 matchargs.append('i:' + p)
2019 2014 for p in opts.get('exclude', []):
2020 2015 matchargs.append('x:' + p)
2021 2016 matchargs = ','.join(('%r' % p) for p in matchargs)
2022 2017 opts['_matchfiles'] = matchargs
2023 2018 if follow:
2024 2019 opts[fnopats[0][followfirst]] = '.'
2025 2020 else:
2026 2021 if follow:
2027 2022 if pats:
2028 2023 # follow() revset interprets its file argument as a
2029 2024 # manifest entry, so use match.files(), not pats.
2030 2025 opts[fpats[followfirst]] = list(match.files())
2031 2026 else:
2032 2027 op = fnopats[followdescendants][followfirst]
2033 2028 opts[op] = 'rev(%d)' % startrev
2034 2029 else:
2035 2030 opts['_patslog'] = list(pats)
2036 2031
2037 2032 filematcher = None
2038 2033 if opts.get('patch') or opts.get('stat'):
2039 2034 # When following files, track renames via a special matcher.
2040 2035 # If we're forced to take the slowpath it means we're following
2041 2036 # at least one pattern/directory, so don't bother with rename tracking.
2042 2037 if follow and not match.always() and not slowpath:
2043 2038 # _makefollowlogfilematcher expects its files argument to be
2044 2039 # relative to the repo root, so use match.files(), not pats.
2045 2040 filematcher = _makefollowlogfilematcher(repo, match.files(),
2046 2041 followfirst)
2047 2042 else:
2048 2043 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2049 2044 if filematcher is None:
2050 2045 filematcher = lambda rev: match
2051 2046
2052 2047 expr = []
2053 2048 for op, val in sorted(opts.iteritems()):
2054 2049 if not val:
2055 2050 continue
2056 2051 if op not in opt2revset:
2057 2052 continue
2058 2053 revop, andor = opt2revset[op]
2059 2054 if '%(val)' not in revop:
2060 2055 expr.append(revop)
2061 2056 else:
2062 2057 if not isinstance(val, list):
2063 2058 e = revop % {'val': val}
2064 2059 else:
2065 2060 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2066 2061 expr.append(e)
2067 2062
2068 2063 if expr:
2069 2064 expr = '(' + ' and '.join(expr) + ')'
2070 2065 else:
2071 2066 expr = None
2072 2067 return expr, filematcher
2073 2068
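# Editorial sketch of the expression assembly at the end of _makelogrevset()
# above, using a stripped-down copy of two opt2revset entries. The option
# values are invented for illustration only.
def _logrevset_expr_example():
    opt2revset = {
        'keyword': ('keyword(%(val)r)', ' or '),
        'user': ('user(%(val)r)', ' or '),
    }
    opts = {'keyword': ['bug'], 'user': ['alice', 'bob']}
    expr = []
    for op, val in sorted(opts.iteritems()):
        revop, andor = opt2revset[op]
        expr.append('(' + andor.join((revop % {'val': v}) for v in val) + ')')
    # result: "((keyword('bug')) and (user('alice') or user('bob')))"
    return '(' + ' and '.join(expr) + ')'
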
2074 2069 def _logrevs(repo, opts):
2075 2070 # Default --rev value depends on --follow but --follow behavior
2076 2071 # depends on revisions resolved from --rev...
2077 2072 follow = opts.get('follow') or opts.get('follow_first')
2078 2073 if opts.get('rev'):
2079 2074 revs = scmutil.revrange(repo, opts['rev'])
2080 2075 elif follow and repo.dirstate.p1() == nullid:
2081 2076 revs = revset.baseset()
2082 2077 elif follow:
2083 2078 revs = repo.revs('reverse(:.)')
2084 2079 else:
2085 2080 revs = revset.spanset(repo)
2086 2081 revs.reverse()
2087 2082 return revs
2088 2083
2089 2084 def getgraphlogrevs(repo, pats, opts):
2090 2085 """Return (revs, expr, filematcher) where revs is an iterable of
2091 2086 revision numbers, expr is a revset string built from log options
2092 2087 and file patterns or None, and used to filter 'revs'. If --stat or
2093 2088 --patch are not passed filematcher is None. Otherwise it is a
2094 2089 callable taking a revision number and returning a match object
2095 2090 filtering the files to be detailed when displaying the revision.
2096 2091 """
2097 2092 limit = loglimit(opts)
2098 2093 revs = _logrevs(repo, opts)
2099 2094 if not revs:
2100 2095 return revset.baseset(), None, None
2101 2096 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2102 2097 if opts.get('rev'):
2103 2098 # User-specified revs might be unsorted, but don't sort before
2104 2099 # _makelogrevset because it might depend on the order of revs
2105 2100 revs.sort(reverse=True)
2106 2101 if expr:
2107 2102 # Revset matchers often operate faster on revisions in changelog
2108 2103 # order, because most filters deal with the changelog.
2109 2104 revs.reverse()
2110 2105 matcher = revset.match(repo.ui, expr)
2111 2106 # Revset matches can reorder revisions. "A or B" typically returns
2112 2107 # the revision matching A then the revision matching B. Sort
2113 2108 # again to fix that.
2114 2109 revs = matcher(repo, revs)
2115 2110 revs.sort(reverse=True)
2116 2111 if limit is not None:
2117 2112 limitedrevs = []
2118 2113 for idx, rev in enumerate(revs):
2119 2114 if idx >= limit:
2120 2115 break
2121 2116 limitedrevs.append(rev)
2122 2117 revs = revset.baseset(limitedrevs)
2123 2118
2124 2119 return revs, expr, filematcher
2125 2120
2126 2121 def getlogrevs(repo, pats, opts):
2127 2122 """Return (revs, expr, filematcher) where revs is an iterable of
2128 2123 revision numbers, expr is a revset string built from log options
2129 2124 and file patterns or None, and used to filter 'revs'. If --stat or
2130 2125 --patch are not passed filematcher is None. Otherwise it is a
2131 2126 callable taking a revision number and returning a match object
2132 2127 filtering the files to be detailed when displaying the revision.
2133 2128 """
2134 2129 limit = loglimit(opts)
2135 2130 revs = _logrevs(repo, opts)
2136 2131 if not revs:
2137 2132 return revset.baseset([]), None, None
2138 2133 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2139 2134 if expr:
2140 2135 # Revset matchers often operate faster on revisions in changelog
2141 2136 # order, because most filters deal with the changelog.
2142 2137 if not opts.get('rev'):
2143 2138 revs.reverse()
2144 2139 matcher = revset.match(repo.ui, expr)
2145 2140 # Revset matches can reorder revisions. "A or B" typically returns
2146 2141 # the revision matching A then the revision matching B. Sort
2147 2142 # again to fix that.
2148 2143 revs = matcher(repo, revs)
2149 2144 if not opts.get('rev'):
2150 2145 revs.sort(reverse=True)
2151 2146 if limit is not None:
2152 2147 limitedrevs = []
2153 2148 for idx, r in enumerate(revs):
2154 2149 if limit <= idx:
2155 2150 break
2156 2151 limitedrevs.append(r)
2157 2152 revs = revset.baseset(limitedrevs)
2158 2153
2159 2154 return revs, expr, filematcher
2160 2155
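# Usage sketch (hypothetical helper) tying together the three values
# returned by getlogrevs(): the revisions to show, the revset actually used
# for filtering (possibly None), and the per-revision file matcher consulted
# for --patch/--stat output. graphlog() below does the same dance through
# displaygraph().
def _getlogrevs_example(ui, repo, pats, opts):
    revs, expr, filematcher = getlogrevs(repo, pats, opts)
    displayer = show_changeset(ui, repo, opts, buffered=False)
    for rev in revs:
        matchfn = None
        if filematcher is not None:
            matchfn = filematcher(rev)
        displayer.show(repo[rev], matchfn=matchfn)
    displayer.close()
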
2161 2156 def _graphnodeformatter(ui, displayer):
2162 2157 spec = ui.config('ui', 'graphnodetemplate')
2163 2158 if not spec:
2164 2159 return templatekw.showgraphnode # fast path for "{graphnode}"
2165 2160
2166 2161 templ = formatter.gettemplater(ui, 'graphnode', spec)
2167 2162 cache = {}
2168 2163 if isinstance(displayer, changeset_templater):
2169 2164 cache = displayer.cache # reuse cache of slow templates
2170 2165 props = templatekw.keywords.copy()
2171 2166 props['templ'] = templ
2172 2167 props['cache'] = cache
2173 2168 def formatnode(repo, ctx):
2174 2169 props['ctx'] = ctx
2175 2170 props['repo'] = repo
2176 2171 props['revcache'] = {}
2177 2172 return templater.stringify(templ('graphnode', **props))
2178 2173 return formatnode
2179 2174
2180 2175 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2181 2176 filematcher=None):
2182 2177 formatnode = _graphnodeformatter(ui, displayer)
2183 2178 seen, state = [], graphmod.asciistate()
2184 2179 for rev, type, ctx, parents in dag:
2185 2180 char = formatnode(repo, ctx)
2186 2181 copies = None
2187 2182 if getrenamed and ctx.rev():
2188 2183 copies = []
2189 2184 for fn in ctx.files():
2190 2185 rename = getrenamed(fn, ctx.rev())
2191 2186 if rename:
2192 2187 copies.append((fn, rename[0]))
2193 2188 revmatchfn = None
2194 2189 if filematcher is not None:
2195 2190 revmatchfn = filematcher(ctx.rev())
2196 2191 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2197 2192 lines = displayer.hunk.pop(rev).split('\n')
2198 2193 if not lines[-1]:
2199 2194 del lines[-1]
2200 2195 displayer.flush(ctx)
2201 2196 edges = edgefn(type, char, lines, seen, rev, parents)
2202 2197 for type, char, lines, coldata in edges:
2203 2198 graphmod.ascii(ui, state, type, char, lines, coldata)
2204 2199 displayer.close()
2205 2200
2206 2201 def graphlog(ui, repo, *pats, **opts):
2207 2202 # Parameters are identical to log command ones
2208 2203 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2209 2204 revdag = graphmod.dagwalker(repo, revs)
2210 2205
2211 2206 getrenamed = None
2212 2207 if opts.get('copies'):
2213 2208 endrev = None
2214 2209 if opts.get('rev'):
2215 2210 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2216 2211 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2217 2212 displayer = show_changeset(ui, repo, opts, buffered=True)
2218 2213 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2219 2214 filematcher)
2220 2215
2221 2216 def checkunsupportedgraphflags(pats, opts):
2222 2217 for op in ["newest_first"]:
2223 2218 if op in opts and opts[op]:
2224 2219 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2225 2220 % op.replace("_", "-"))
2226 2221
2227 2222 def graphrevs(repo, nodes, opts):
2228 2223 limit = loglimit(opts)
2229 2224 nodes.reverse()
2230 2225 if limit is not None:
2231 2226 nodes = nodes[:limit]
2232 2227 return graphmod.nodes(repo, nodes)
2233 2228
2234 2229 def add(ui, repo, match, prefix, explicitonly, **opts):
2235 2230 join = lambda f: os.path.join(prefix, f)
2236 2231 bad = []
2237 2232
2238 2233 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2239 2234 names = []
2240 2235 wctx = repo[None]
2241 2236 cca = None
2242 2237 abort, warn = scmutil.checkportabilityalert(ui)
2243 2238 if abort or warn:
2244 2239 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2245 2240
2246 2241 badmatch = matchmod.badmatch(match, badfn)
2247 2242 dirstate = repo.dirstate
2248 2243 # We don't want to just call wctx.walk here, since it would return a lot of
2249 2244 # clean files, which we aren't interested in, and walking them takes time.
2250 2245 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2251 2246 True, False, full=False)):
2252 2247 exact = match.exact(f)
2253 2248 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2254 2249 if cca:
2255 2250 cca(f)
2256 2251 names.append(f)
2257 2252 if ui.verbose or not exact:
2258 2253 ui.status(_('adding %s\n') % match.rel(f))
2259 2254
2260 2255 for subpath in sorted(wctx.substate):
2261 2256 sub = wctx.sub(subpath)
2262 2257 try:
2263 2258 submatch = matchmod.narrowmatcher(subpath, match)
2264 2259 if opts.get('subrepos'):
2265 2260 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2266 2261 else:
2267 2262 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2268 2263 except error.LookupError:
2269 2264 ui.status(_("skipping missing subrepository: %s\n")
2270 2265 % join(subpath))
2271 2266
2272 2267 if not opts.get('dry_run'):
2273 2268 rejected = wctx.add(names, prefix)
2274 2269 bad.extend(f for f in rejected if f in match.files())
2275 2270 return bad
2276 2271
2277 2272 def forget(ui, repo, match, prefix, explicitonly):
2278 2273 join = lambda f: os.path.join(prefix, f)
2279 2274 bad = []
2280 2275 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2281 2276 wctx = repo[None]
2282 2277 forgot = []
2283 2278
2284 2279 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2285 2280 forget = sorted(s[0] + s[1] + s[3] + s[6])
2286 2281 if explicitonly:
2287 2282 forget = [f for f in forget if match.exact(f)]
2288 2283
2289 2284 for subpath in sorted(wctx.substate):
2290 2285 sub = wctx.sub(subpath)
2291 2286 try:
2292 2287 submatch = matchmod.narrowmatcher(subpath, match)
2293 2288 subbad, subforgot = sub.forget(submatch, prefix)
2294 2289 bad.extend([subpath + '/' + f for f in subbad])
2295 2290 forgot.extend([subpath + '/' + f for f in subforgot])
2296 2291 except error.LookupError:
2297 2292 ui.status(_("skipping missing subrepository: %s\n")
2298 2293 % join(subpath))
2299 2294
2300 2295 if not explicitonly:
2301 2296 for f in match.files():
2302 2297 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2303 2298 if f not in forgot:
2304 2299 if repo.wvfs.exists(f):
2305 2300 # Don't complain if the exact case match wasn't given.
2306 2301 # But don't do this until after checking 'forgot', so
2307 2302 # that subrepo files aren't normalized, and this op is
2308 2303 # purely from data cached by the status walk above.
2309 2304 if repo.dirstate.normalize(f) in repo.dirstate:
2310 2305 continue
2311 2306 ui.warn(_('not removing %s: '
2312 2307 'file is already untracked\n')
2313 2308 % match.rel(f))
2314 2309 bad.append(f)
2315 2310
2316 2311 for f in forget:
2317 2312 if ui.verbose or not match.exact(f):
2318 2313 ui.status(_('removing %s\n') % match.rel(f))
2319 2314
2320 2315 rejected = wctx.forget(forget, prefix)
2321 2316 bad.extend(f for f in rejected if f in match.files())
2322 2317 forgot.extend(f for f in forget if f not in rejected)
2323 2318 return bad, forgot
2324 2319
2325 2320 def files(ui, ctx, m, fm, fmt, subrepos):
2326 2321 rev = ctx.rev()
2327 2322 ret = 1
2328 2323 ds = ctx.repo().dirstate
2329 2324
2330 2325 for f in ctx.matches(m):
2331 2326 if rev is None and ds[f] == 'r':
2332 2327 continue
2333 2328 fm.startitem()
2334 2329 if ui.verbose:
2335 2330 fc = ctx[f]
2336 2331 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2337 2332 fm.data(abspath=f)
2338 2333 fm.write('path', fmt, m.rel(f))
2339 2334 ret = 0
2340 2335
2341 2336 for subpath in sorted(ctx.substate):
2342 2337 def matchessubrepo(subpath):
2343 2338 return (m.always() or m.exact(subpath)
2344 2339 or any(f.startswith(subpath + '/') for f in m.files()))
2345 2340
2346 2341 if subrepos or matchessubrepo(subpath):
2347 2342 sub = ctx.sub(subpath)
2348 2343 try:
2349 2344 submatch = matchmod.narrowmatcher(subpath, m)
2350 2345 if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
2351 2346 ret = 0
2352 2347 except error.LookupError:
2353 2348 ui.status(_("skipping missing subrepository: %s\n")
2354 2349 % m.abs(subpath))
2355 2350
2356 2351 return ret
2357 2352
2358 2353 def remove(ui, repo, m, prefix, after, force, subrepos):
2359 2354 join = lambda f: os.path.join(prefix, f)
2360 2355 ret = 0
2361 2356 s = repo.status(match=m, clean=True)
2362 2357 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2363 2358
2364 2359 wctx = repo[None]
2365 2360
2366 2361 for subpath in sorted(wctx.substate):
2367 2362 def matchessubrepo(matcher, subpath):
2368 2363 if matcher.exact(subpath):
2369 2364 return True
2370 2365 for f in matcher.files():
2371 2366 if f.startswith(subpath):
2372 2367 return True
2373 2368 return False
2374 2369
2375 2370 if subrepos or matchessubrepo(m, subpath):
2376 2371 sub = wctx.sub(subpath)
2377 2372 try:
2378 2373 submatch = matchmod.narrowmatcher(subpath, m)
2379 2374 if sub.removefiles(submatch, prefix, after, force, subrepos):
2380 2375 ret = 1
2381 2376 except error.LookupError:
2382 2377 ui.status(_("skipping missing subrepository: %s\n")
2383 2378 % join(subpath))
2384 2379
2385 2380 # warn about failure to delete explicit files/dirs
2386 2381 deleteddirs = util.dirs(deleted)
2387 2382 for f in m.files():
2388 2383 def insubrepo():
2389 2384 for subpath in wctx.substate:
2390 2385 if f.startswith(subpath):
2391 2386 return True
2392 2387 return False
2393 2388
2394 2389 isdir = f in deleteddirs or wctx.hasdir(f)
2395 2390 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2396 2391 continue
2397 2392
2398 2393 if repo.wvfs.exists(f):
2399 2394 if repo.wvfs.isdir(f):
2400 2395 ui.warn(_('not removing %s: no tracked files\n')
2401 2396 % m.rel(f))
2402 2397 else:
2403 2398 ui.warn(_('not removing %s: file is untracked\n')
2404 2399 % m.rel(f))
2405 2400 # missing files will generate a warning elsewhere
2406 2401 ret = 1
2407 2402
2408 2403 if force:
2409 2404 list = modified + deleted + clean + added
2410 2405 elif after:
2411 2406 list = deleted
2412 2407 for f in modified + added + clean:
2413 2408 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2414 2409 ret = 1
2415 2410 else:
2416 2411 list = deleted + clean
2417 2412 for f in modified:
2418 2413 ui.warn(_('not removing %s: file is modified (use -f'
2419 2414 ' to force removal)\n') % m.rel(f))
2420 2415 ret = 1
2421 2416 for f in added:
2422 2417 ui.warn(_('not removing %s: file has been marked for add'
2423 2418 ' (use forget to undo)\n') % m.rel(f))
2424 2419 ret = 1
2425 2420
2426 2421 for f in sorted(list):
2427 2422 if ui.verbose or not m.exact(f):
2428 2423 ui.status(_('removing %s\n') % m.rel(f))
2429 2424
2430 2425 wlock = repo.wlock()
2431 2426 try:
2432 2427 if not after:
2433 2428 for f in list:
2434 2429 if f in added:
2435 2430 continue # we never unlink added files on remove
2436 2431 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2437 2432 repo[None].forget(list)
2438 2433 finally:
2439 2434 wlock.release()
2440 2435
2441 2436 return ret
2442 2437
2443 2438 def cat(ui, repo, ctx, matcher, prefix, **opts):
2444 2439 err = 1
2445 2440
2446 2441 def write(path):
2447 2442 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2448 2443 pathname=os.path.join(prefix, path))
2449 2444 data = ctx[path].data()
2450 2445 if opts.get('decode'):
2451 2446 data = repo.wwritedata(path, data)
2452 2447 fp.write(data)
2453 2448 fp.close()
2454 2449
2455 2450 # Automation often uses hg cat on single files, so special case it
2456 2451 # for performance to avoid the cost of parsing the manifest.
2457 2452 if len(matcher.files()) == 1 and not matcher.anypats():
2458 2453 file = matcher.files()[0]
2459 2454 mf = repo.manifest
2460 2455 mfnode = ctx.manifestnode()
2461 2456 if mfnode and mf.find(mfnode, file)[0]:
2462 2457 write(file)
2463 2458 return 0
2464 2459
2465 2460 # Don't warn about "missing" files that are really in subrepos
2466 2461 def badfn(path, msg):
2467 2462 for subpath in ctx.substate:
2468 2463 if path.startswith(subpath):
2469 2464 return
2470 2465 matcher.bad(path, msg)
2471 2466
2472 2467 for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
2473 2468 write(abs)
2474 2469 err = 0
2475 2470
2476 2471 for subpath in sorted(ctx.substate):
2477 2472 sub = ctx.sub(subpath)
2478 2473 try:
2479 2474 submatch = matchmod.narrowmatcher(subpath, matcher)
2480 2475
2481 2476 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2482 2477 **opts):
2483 2478 err = 0
2484 2479 except error.RepoLookupError:
2485 2480 ui.status(_("skipping missing subrepository: %s\n")
2486 2481 % os.path.join(prefix, subpath))
2487 2482
2488 2483 return err
2489 2484
2490 2485 def commit(ui, repo, commitfunc, pats, opts):
2491 2486 '''commit the specified files or all outstanding changes'''
2492 2487 date = opts.get('date')
2493 2488 if date:
2494 2489 opts['date'] = util.parsedate(date)
2495 2490 message = logmessage(ui, opts)
2496 2491 matcher = scmutil.match(repo[None], pats, opts)
2497 2492
2498 2493 # extract addremove carefully -- this function can be called from a command
2499 2494 # that doesn't support addremove
2500 2495 if opts.get('addremove'):
2501 2496 if scmutil.addremove(repo, matcher, "", opts) != 0:
2502 2497 raise error.Abort(
2503 2498 _("failed to mark all new/missing files as added/removed"))
2504 2499
2505 2500 return commitfunc(ui, repo, message, matcher, opts)
2506 2501
2507 2502 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2508 2503 # avoid cycle context -> subrepo -> cmdutil
2509 2504 import context
2510 2505
2511 2506 # amend will reuse the existing user if not specified, but the obsolete
2512 2507 # marker creation requires that the current user's name is specified.
2513 2508 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2514 2509 ui.username() # raise exception if username not set
2515 2510
2516 2511 ui.note(_('amending changeset %s\n') % old)
2517 2512 base = old.p1()
2518 2513 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2519 2514
2520 2515 wlock = lock = newid = None
2521 2516 try:
2522 2517 wlock = repo.wlock()
2523 2518 lock = repo.lock()
2524 2519 tr = repo.transaction('amend')
2525 2520 try:
2526 2521 # See if we got a message from -m or -l, if not, open the editor
2527 2522 # with the message of the changeset to amend
2528 2523 message = logmessage(ui, opts)
2529 2524 # ensure logfile does not conflict with later enforcement of the
2530 2525 # message. potential logfile content has been processed by
2531 2526 # `logmessage` anyway.
2532 2527 opts.pop('logfile')
2533 2528 # First, do a regular commit to record all changes in the working
2534 2529 # directory (if there are any)
2535 2530 ui.callhooks = False
2536 2531 activebookmark = repo._activebookmark
2537 2532 try:
2538 2533 repo._activebookmark = None
2539 2534 opts['message'] = 'temporary amend commit for %s' % old
2540 2535 node = commit(ui, repo, commitfunc, pats, opts)
2541 2536 finally:
2542 2537 repo._activebookmark = activebookmark
2543 2538 ui.callhooks = True
2544 2539 ctx = repo[node]
2545 2540
2546 2541 # Participating changesets:
2547 2542 #
2548 2543 # node/ctx o - new (intermediate) commit that contains changes
2549 2544 # | from working dir to go into amending commit
2550 2545 # | (or a workingctx if there were no changes)
2551 2546 # |
2552 2547 # old o - changeset to amend
2553 2548 # |
2554 2549 # base o - parent of amending changeset
2555 2550
2556 2551 # Update extra dict from amended commit (e.g. to preserve graft
2557 2552 # source)
2558 2553 extra.update(old.extra())
2559 2554
2560 2555 # Also update it from the intermediate commit or from the wctx
2561 2556 extra.update(ctx.extra())
2562 2557
2563 2558 if len(old.parents()) > 1:
2564 2559 # ctx.files() isn't reliable for merges, so fall back to the
2565 2560 # slower repo.status() method
2566 2561 files = set([fn for st in repo.status(base, old)[:3]
2567 2562 for fn in st])
2568 2563 else:
2569 2564 files = set(old.files())
2570 2565
2571 2566 # Second, we use either the commit we just did, or if there were no
2572 2567 # changes the parent of the working directory as the version of the
2573 2568 # files in the final amend commit
2574 2569 if node:
2575 2570 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2576 2571
2577 2572 user = ctx.user()
2578 2573 date = ctx.date()
2579 2574 # Recompute copies (avoid recording a -> b -> a)
2580 2575 copied = copies.pathcopies(base, ctx)
2581 2576 if old.p2():
2582 2577 copied.update(copies.pathcopies(old.p2(), ctx))
2583 2578
2584 2579 # Prune files which were reverted by the updates: if old
2585 2580 # introduced file X and our intermediate commit, node,
2586 2581 # renamed that file, then those two files are the same and
2587 2582 # we can discard X from our list of files. Likewise if X
2588 2583 # was deleted, it's no longer relevant
2589 2584 files.update(ctx.files())
2590 2585
2591 2586 def samefile(f):
2592 2587 if f in ctx.manifest():
2593 2588 a = ctx.filectx(f)
2594 2589 if f in base.manifest():
2595 2590 b = base.filectx(f)
2596 2591 return (not a.cmp(b)
2597 2592 and a.flags() == b.flags())
2598 2593 else:
2599 2594 return False
2600 2595 else:
2601 2596 return f not in base.manifest()
2602 2597 files = [f for f in files if not samefile(f)]
2603 2598
2604 2599 def filectxfn(repo, ctx_, path):
2605 2600 try:
2606 2601 fctx = ctx[path]
2607 2602 flags = fctx.flags()
2608 2603 mctx = context.memfilectx(repo,
2609 2604 fctx.path(), fctx.data(),
2610 2605 islink='l' in flags,
2611 2606 isexec='x' in flags,
2612 2607 copied=copied.get(path))
2613 2608 return mctx
2614 2609 except KeyError:
2615 2610 return None
2616 2611 else:
2617 2612 ui.note(_('copying changeset %s to %s\n') % (old, base))
2618 2613
2619 2614 # Use version of files as in the old cset
2620 2615 def filectxfn(repo, ctx_, path):
2621 2616 try:
2622 2617 return old.filectx(path)
2623 2618 except KeyError:
2624 2619 return None
2625 2620
2626 2621 user = opts.get('user') or old.user()
2627 2622 date = opts.get('date') or old.date()
2628 2623 editform = mergeeditform(old, 'commit.amend')
2629 2624 editor = getcommiteditor(editform=editform, **opts)
2630 2625 if not message:
2631 2626 editor = getcommiteditor(edit=True, editform=editform)
2632 2627 message = old.description()
2633 2628
2634 2629 pureextra = extra.copy()
2635 2630 if 'amend_source' in pureextra:
2636 2631 del pureextra['amend_source']
2637 2632 pureoldextra = old.extra()
2638 2633 if 'amend_source' in pureoldextra:
2639 2634 del pureoldextra['amend_source']
2640 2635 extra['amend_source'] = old.hex()
2641 2636
2642 2637 new = context.memctx(repo,
2643 2638 parents=[base.node(), old.p2().node()],
2644 2639 text=message,
2645 2640 files=files,
2646 2641 filectxfn=filectxfn,
2647 2642 user=user,
2648 2643 date=date,
2649 2644 extra=extra,
2650 2645 editor=editor)
2651 2646
2652 2647 newdesc = changelog.stripdesc(new.description())
2653 2648 if ((not node)
2654 2649 and newdesc == old.description()
2655 2650 and user == old.user()
2656 2651 and date == old.date()
2657 2652 and pureextra == pureoldextra):
2658 2653 # nothing changed. continuing here would create a new node
2659 2654 # anyway because of the amend_source noise.
2660 2655 #
2661 2656 # This is not what we expect from amend.
2662 2657 return old.node()
2663 2658
2664 2659 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2665 2660 try:
2666 2661 if opts.get('secret'):
2667 2662 commitphase = 'secret'
2668 2663 else:
2669 2664 commitphase = old.phase()
2670 2665 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2671 2666 newid = repo.commitctx(new)
2672 2667 finally:
2673 2668 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2674 2669 if newid != old.node():
2675 2670 # Reroute the working copy parent to the new changeset
2676 2671 repo.setparents(newid, nullid)
2677 2672
2678 2673 # Move bookmarks from old parent to amend commit
2679 2674 bms = repo.nodebookmarks(old.node())
2680 2675 if bms:
2681 2676 marks = repo._bookmarks
2682 2677 for bm in bms:
2683 2678 ui.debug('moving bookmarks %r from %s to %s\n' %
2684 2679 (marks, old.hex(), hex(newid)))
2685 2680 marks[bm] = newid
2686 2681 marks.recordchange(tr)
2687 2682 # commit the whole amend process
2688 2683 if createmarkers:
2689 2684 # mark the new changeset as successor of the rewritten one
2690 2685 new = repo[newid]
2691 2686 obs = [(old, (new,))]
2692 2687 if node:
2693 2688 obs.append((ctx, ()))
2694 2689
2695 2690 obsolete.createmarkers(repo, obs)
2696 2691 tr.close()
2697 2692 finally:
2698 2693 tr.release()
2699 2694 if not createmarkers and newid != old.node():
2700 2695 # Strip the intermediate commit (if there was one) and the amended
2701 2696 # commit
2702 2697 if node:
2703 2698 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2704 2699 ui.note(_('stripping amended changeset %s\n') % old)
2705 2700 repair.strip(ui, repo, old.node(), topic='amend-backup')
2706 2701 finally:
2707 2702 lockmod.release(lock, wlock)
2708 2703 return newid
2709 2704
2710 2705 def commiteditor(repo, ctx, subs, editform=''):
2711 2706 if ctx.description():
2712 2707 return ctx.description()
2713 2708 return commitforceeditor(repo, ctx, subs, editform=editform,
2714 2709 unchangedmessagedetection=True)
2715 2710
2716 2711 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2717 2712 editform='', unchangedmessagedetection=False):
2718 2713 if not extramsg:
2719 2714 extramsg = _("Leave message empty to abort commit.")
2720 2715
2721 2716 forms = [e for e in editform.split('.') if e]
2722 2717 forms.insert(0, 'changeset')
2723 2718 templatetext = None
2724 2719 while forms:
2725 2720 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2726 2721 if tmpl:
2727 2722 templatetext = committext = buildcommittemplate(
2728 2723 repo, ctx, subs, extramsg, tmpl)
2729 2724 break
2730 2725 forms.pop()
2731 2726 else:
2732 2727 committext = buildcommittext(repo, ctx, subs, extramsg)
2733 2728
2734 2729 # run editor in the repository root
2735 2730 olddir = os.getcwd()
2736 2731 os.chdir(repo.root)
2737 2732
2738 2733 # make in-memory changes visible to external process
2739 2734 tr = repo.currenttransaction()
2740 2735 repo.dirstate.write(tr)
2741 2736 pending = tr and tr.writepending() and repo.root
2742 2737
2743 2738 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2744 2739 editform=editform, pending=pending)
2745 2740 text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
2746 2741 os.chdir(olddir)
2747 2742
2748 2743 if finishdesc:
2749 2744 text = finishdesc(text)
2750 2745 if not text.strip():
2751 2746 raise error.Abort(_("empty commit message"))
2752 2747 if unchangedmessagedetection and editortext == templatetext:
2753 2748 raise error.Abort(_("commit message unchanged"))
2754 2749
2755 2750 return text
2756 2751
2757 2752 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2758 2753 ui = repo.ui
2759 2754 tmpl, mapfile = gettemplate(ui, tmpl, None)
2760 2755
2761 2756 try:
2762 2757 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2763 2758 except SyntaxError as inst:
2764 2759 raise error.Abort(inst.args[0])
2765 2760
2766 2761 for k, v in repo.ui.configitems('committemplate'):
2767 2762 if k != 'changeset':
2768 2763 t.t.cache[k] = v
2769 2764
2770 2765 if not extramsg:
2771 2766 extramsg = '' # ensure that extramsg is string
2772 2767
2773 2768 ui.pushbuffer()
2774 2769 t.show(ctx, extramsg=extramsg)
2775 2770 return ui.popbuffer()
2776 2771
2777 2772 def hgprefix(msg):
2778 2773 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2779 2774
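# Tiny editorial illustration of hgprefix(): empty lines are dropped and the
# rest gain the "HG:" prefix, which keeps the comment block built by
# buildcommittext() below compact.
def _hgprefix_example():
    text = hgprefix("user: alice\n\nbranch 'default'")
    # text == "HG: user: alice\nHG: branch 'default'"
    return text
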
2780 2775 def buildcommittext(repo, ctx, subs, extramsg):
2781 2776 edittext = []
2782 2777 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2783 2778 if ctx.description():
2784 2779 edittext.append(ctx.description())
2785 2780 edittext.append("")
2786 2781 edittext.append("") # Empty line between message and comments.
2787 2782 edittext.append(hgprefix(_("Enter commit message."
2788 2783 " Lines beginning with 'HG:' are removed.")))
2789 2784 edittext.append(hgprefix(extramsg))
2790 2785 edittext.append("HG: --")
2791 2786 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2792 2787 if ctx.p2():
2793 2788 edittext.append(hgprefix(_("branch merge")))
2794 2789 if ctx.branch():
2795 2790 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2796 2791 if bookmarks.isactivewdirparent(repo):
2797 2792 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2798 2793 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2799 2794 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2800 2795 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2801 2796 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2802 2797 if not added and not modified and not removed:
2803 2798 edittext.append(hgprefix(_("no files changed")))
2804 2799 edittext.append("")
2805 2800
2806 2801 return "\n".join(edittext)
2807 2802
2808 2803 def commitstatus(repo, node, branch, bheads=None, opts=None):
2809 2804 if opts is None:
2810 2805 opts = {}
2811 2806 ctx = repo[node]
2812 2807 parents = ctx.parents()
2813 2808
2814 2809 if (not opts.get('amend') and bheads and node not in bheads and not
2815 2810 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2816 2811 repo.ui.status(_('created new head\n'))
2817 2812 # The message is not printed for initial roots. For the other
2818 2813 # changesets, it is printed in the following situations:
2819 2814 #
2820 2815 # Par column: for the 2 parents with ...
2821 2816 # N: null or no parent
2822 2817 # B: parent is on another named branch
2823 2818 # C: parent is a regular non head changeset
2824 2819 # H: parent was a branch head of the current branch
2825 2820 # Msg column: whether we print "created new head" message
2826 2821 # In the following, it is assumed that there already exist some
2827 2822 # initial branch heads of the current branch, otherwise nothing is
2828 2823 # printed anyway.
2829 2824 #
2830 2825 # Par Msg Comment
2831 2826 # N N y additional topo root
2832 2827 #
2833 2828 # B N y additional branch root
2834 2829 # C N y additional topo head
2835 2830 # H N n usual case
2836 2831 #
2837 2832 # B B y weird additional branch root
2838 2833 # C B y branch merge
2839 2834 # H B n merge with named branch
2840 2835 #
2841 2836 # C C y additional head from merge
2842 2837 # C H n merge with a head
2843 2838 #
2844 2839 # H H n head merge: head count decreases
2845 2840
2846 2841 if not opts.get('close_branch'):
2847 2842 for r in parents:
2848 2843 if r.closesbranch() and r.branch() == branch:
2849 2844 repo.ui.status(_('reopening closed branch head %d\n') % r)
2850 2845
2851 2846 if repo.ui.debugflag:
2852 2847 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2853 2848 elif repo.ui.verbose:
2854 2849 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2855 2850
2856 2851 def revert(ui, repo, ctx, parents, *pats, **opts):
2857 2852 parent, p2 = parents
2858 2853 node = ctx.node()
2859 2854
2860 2855 mf = ctx.manifest()
2861 2856 if node == p2:
2862 2857 parent = p2
2863 2858 if node == parent:
2864 2859 pmf = mf
2865 2860 else:
2866 2861 pmf = None
2867 2862
2868 2863 # need all matching names in dirstate and manifest of target rev,
2869 2864 # so have to walk both. do not print errors if files exist in one
2870 2865 # but not other. in both cases, filesets should be evaluated against
2871 2866 # workingctx to get consistent result (issue4497). this means 'set:**'
2872 2867 # cannot be used to select missing files from target rev.
2873 2868
2874 2869 # `names` is a mapping for all elements in working copy and target revision
2875 2870 # The mapping is in the form:
2876 2871 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2877 2872 names = {}
2878 2873
2879 2874 wlock = repo.wlock()
2880 2875 try:
2881 2876 ## filling of the `names` mapping
2882 2877 # walk dirstate to fill `names`
2883 2878
2884 2879 interactive = opts.get('interactive', False)
2885 2880 wctx = repo[None]
2886 2881 m = scmutil.match(wctx, pats, opts)
2887 2882
2888 2883 # we'll need this later
2889 2884 targetsubs = sorted(s for s in wctx.substate if m(s))
2890 2885
2891 2886 if not m.always():
2892 2887 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2893 2888 names[abs] = m.rel(abs), m.exact(abs)
2894 2889
2895 2890 # walk target manifest to fill `names`
2896 2891
2897 2892 def badfn(path, msg):
2898 2893 if path in names:
2899 2894 return
2900 2895 if path in ctx.substate:
2901 2896 return
2902 2897 path_ = path + '/'
2903 2898 for f in names:
2904 2899 if f.startswith(path_):
2905 2900 return
2906 2901 ui.warn("%s: %s\n" % (m.rel(path), msg))
2907 2902
2908 2903 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2909 2904 if abs not in names:
2910 2905 names[abs] = m.rel(abs), m.exact(abs)
2911 2906
2912 2907 # Find the status of all files in `names`.
2913 2908 m = scmutil.matchfiles(repo, names)
2914 2909
2915 2910 changes = repo.status(node1=node, match=m,
2916 2911 unknown=True, ignored=True, clean=True)
2917 2912 else:
2918 2913 changes = repo.status(node1=node, match=m)
2919 2914 for kind in changes:
2920 2915 for abs in kind:
2921 2916 names[abs] = m.rel(abs), m.exact(abs)
2922 2917
2923 2918 m = scmutil.matchfiles(repo, names)
2924 2919
2925 2920 modified = set(changes.modified)
2926 2921 added = set(changes.added)
2927 2922 removed = set(changes.removed)
2928 2923 _deleted = set(changes.deleted)
2929 2924 unknown = set(changes.unknown)
2930 2925 unknown.update(changes.ignored)
2931 2926 clean = set(changes.clean)
2932 2927 modadded = set()
2933 2928
2934 2929 # split between files known in target manifest and the others
2935 2930 smf = set(mf)
2936 2931
2937 2932 # determine the exact nature of the deleted files
2938 2933 deladded = _deleted - smf
2939 2934 deleted = _deleted - deladded
2940 2935
2941 2936 # We need to account for the state of the file in the dirstate,
2942 2937 # even when we revert against something other than the parent. This will
2943 2938 # slightly alter the behavior of revert (doing a backup or not, delete
2944 2939 # or just forget, etc.).
2945 2940 if parent == node:
2946 2941 dsmodified = modified
2947 2942 dsadded = added
2948 2943 dsremoved = removed
2949 2944 # store all local modifications, useful later for rename detection
2950 2945 localchanges = dsmodified | dsadded
2951 2946 modified, added, removed = set(), set(), set()
2952 2947 else:
2953 2948 changes = repo.status(node1=parent, match=m)
2954 2949 dsmodified = set(changes.modified)
2955 2950 dsadded = set(changes.added)
2956 2951 dsremoved = set(changes.removed)
2957 2952 # store all local modifications, useful later for rename detection
2958 2953 localchanges = dsmodified | dsadded
2959 2954
2960 2955 # only take into account removes between wc and target
2961 2956 clean |= dsremoved - removed
2962 2957 dsremoved &= removed
2963 2958 # distinguish between dirstate removes and the others
2964 2959 removed -= dsremoved
2965 2960
2966 2961 modadded = added & dsmodified
2967 2962 added -= modadded
2968 2963
2969 2964 # tell newly modified files apart.
2970 2965 dsmodified &= modified
2971 2966 dsmodified |= modified & dsadded # dirstate added may need backup
2972 2967 modified -= dsmodified
2973 2968
2974 2969 # We need to wait for some post-processing to update this set
2975 2970 # before making the distinction. The dirstate will be used for
2976 2971 # that purpose.
2977 2972 dsadded = added
2978 2973
2979 2974 # in case of merge, files that are actually added can be reported as
2980 2975 # modified; we need to post-process the result
2981 2976 if p2 != nullid:
2982 2977 if pmf is None:
2983 2978 # only need parent manifest in the merge case,
2984 2979 # so do not read by default
2985 2980 pmf = repo[parent].manifest()
2986 2981 mergeadd = dsmodified - set(pmf)
2987 2982 dsadded |= mergeadd
2988 2983 dsmodified -= mergeadd
2989 2984
2990 2985 # if f is a rename, update `names` to also revert the source
2991 2986 cwd = repo.getcwd()
2992 2987 for f in localchanges:
2993 2988 src = repo.dirstate.copied(f)
2994 2989 # XXX should we check for rename down to target node?
2995 2990 if src and src not in names and repo.dirstate[src] == 'r':
2996 2991 dsremoved.add(src)
2997 2992 names[src] = (repo.pathto(src, cwd), True)
2998 2993
2999 2994 # distinguish between files to forget and the others
3000 2995 added = set()
3001 2996 for abs in dsadded:
3002 2997 if repo.dirstate[abs] != 'a':
3003 2998 added.add(abs)
3004 2999 dsadded -= added
3005 3000
3006 3001 for abs in deladded:
3007 3002 if repo.dirstate[abs] == 'a':
3008 3003 dsadded.add(abs)
3009 3004 deladded -= dsadded
3010 3005
3011 3006 # For files marked as removed, we check if an unknown file is present at
3012 3007 # the same path. If such a file exists, it may need to be backed up.
3013 3008 # Making the distinction at this stage keeps the backup
3014 3009 # logic simpler.
3015 3010 removunk = set()
3016 3011 for abs in removed:
3017 3012 target = repo.wjoin(abs)
3018 3013 if os.path.lexists(target):
3019 3014 removunk.add(abs)
3020 3015 removed -= removunk
3021 3016
3022 3017 dsremovunk = set()
3023 3018 for abs in dsremoved:
3024 3019 target = repo.wjoin(abs)
3025 3020 if os.path.lexists(target):
3026 3021 dsremovunk.add(abs)
3027 3022 dsremoved -= dsremovunk
3028 3023
3029 3024 # action to be actually performed by revert
3030 3025 # (<list of file>, message>) tuple
3031 3026 actions = {'revert': ([], _('reverting %s\n')),
3032 3027 'add': ([], _('adding %s\n')),
3033 3028 'remove': ([], _('removing %s\n')),
3034 3029 'drop': ([], _('removing %s\n')),
3035 3030 'forget': ([], _('forgetting %s\n')),
3036 3031 'undelete': ([], _('undeleting %s\n')),
3037 3032 'noop': (None, _('no changes needed to %s\n')),
3038 3033 'unknown': (None, _('file not managed: %s\n')),
3039 3034 }
3040 3035
3041 3036 # "constant" that convey the backup strategy.
3042 3037 # All set to `discard` if `no-backup` is set do avoid checking
3043 3038 # no_backup lower in the code.
3044 3039 # These values are ordered for comparison purposes
3045 3040 backup = 2 # unconditionally do backup
3046 3041 check = 1 # check if the existing file differs from target
3047 3042 discard = 0 # never do backup
3048 3043 if opts.get('no_backup'):
3049 3044 backup = check = discard
3050 3045
3051 3046 backupanddel = actions['remove']
3052 3047 if not opts.get('no_backup'):
3053 3048 backupanddel = actions['drop']
3054 3049
3055 3050 disptable = (
3056 3051 # dispatch table:
3057 3052 # file state
3058 3053 # action
3059 3054 # make backup
3060 3055
3061 3056 ## Sets that result in files changed on disk
3062 3057 # Modified compared to target, no local change
3063 3058 (modified, actions['revert'], discard),
3064 3059 # Modified compared to target, but local file is deleted
3065 3060 (deleted, actions['revert'], discard),
3066 3061 # Modified compared to target, local change
3067 3062 (dsmodified, actions['revert'], backup),
3068 3063 # Added since target
3069 3064 (added, actions['remove'], discard),
3070 3065 # Added in working directory
3071 3066 (dsadded, actions['forget'], discard),
3072 3067 # Added since target, have local modification
3073 3068 (modadded, backupanddel, backup),
3074 3069 # Added since target but file is missing in working directory
3075 3070 (deladded, actions['drop'], discard),
3076 3071 # Removed since target, before working copy parent
3077 3072 (removed, actions['add'], discard),
3078 3073 # Same as `removed` but an unknown file exists at the same path
3079 3074 (removunk, actions['add'], check),
3080 3075 # Removed since target, marked as such in working copy parent
3081 3076 (dsremoved, actions['undelete'], discard),
3082 3077 # Same as `dsremoved` but an unknown file exists at the same path
3083 3078 (dsremovunk, actions['undelete'], check),
3084 3079 ## the following sets do not result in any file changes
3085 3080 # File with no modification
3086 3081 (clean, actions['noop'], discard),
3087 3082 # Existing file, not tracked anywhere
3088 3083 (unknown, actions['unknown'], discard),
3089 3084 )
3090 3085
3091 3086 for abs, (rel, exact) in sorted(names.items()):
3092 3087 # target file to be touched on disk (relative to cwd)
3093 3088 target = repo.wjoin(abs)
3094 3089 # look up the entry in the dispatch table.
3095 3090 # if the file is in any of these sets, it was touched in the working
3096 3091 # directory parent and we are sure it needs to be reverted.
3097 3092 for table, (xlist, msg), dobackup in disptable:
3098 3093 if abs not in table:
3099 3094 continue
3100 3095 if xlist is not None:
3101 3096 xlist.append(abs)
3102 3097 if dobackup and (backup <= dobackup
3103 3098 or wctx[abs].cmp(ctx[abs])):
3104 3099 bakname = origpath(ui, repo, rel)
3105 3100 ui.note(_('saving current version of %s as %s\n') %
3106 3101 (rel, bakname))
3107 3102 if not opts.get('dry_run'):
3108 3103 if interactive:
3109 3104 util.copyfile(target, bakname)
3110 3105 else:
3111 3106 util.rename(target, bakname)
3112 3107 if ui.verbose or not exact:
3113 3108 if not isinstance(msg, basestring):
3114 3109 msg = msg(abs)
3115 3110 ui.status(msg % rel)
3116 3111 elif exact:
3117 3112 ui.warn(msg % rel)
3118 3113 break
3119 3114
3120 3115 if not opts.get('dry_run'):
3121 3116 needdata = ('revert', 'add', 'undelete')
3122 3117 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3123 3118 _performrevert(repo, parents, ctx, actions, interactive)
3124 3119
3125 3120 if targetsubs:
3126 3121 # Revert the subrepos on the revert list
3127 3122 for sub in targetsubs:
3128 3123 try:
3129 3124 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3130 3125 except KeyError:
3131 3126 raise error.Abort("subrepository '%s' does not exist in %s!"
3132 3127 % (sub, short(ctx.node())))
3133 3128 finally:
3134 3129 wlock.release()
3135 3130
3136 3131 def origpath(ui, repo, filepath):
3137 3132 '''customize where .orig files are created
3138 3133
3139 3134 Fetch the user-defined path from the config file: [ui] origbackuppath = <path>
3140 3135 Fall back to the default (filepath + ".orig") if not specified
3141 3136 '''
3142 3137 origbackuppath = ui.config('ui', 'origbackuppath', None)
3143 3138 if origbackuppath is None:
3144 3139 return filepath + ".orig"
3145 3140
3146 3141 filepathfromroot = os.path.relpath(filepath, start=repo.root)
3147 3142 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
3148 3143
3149 3144 origbackupdir = repo.vfs.dirname(fullorigpath)
3150 3145 if not repo.vfs.exists(origbackupdir):
3151 3146 ui.note(_('creating directory: %s\n') % origbackupdir)
3152 3147 util.makedirs(origbackupdir)
3153 3148
3154 3149 return fullorigpath + ".orig"
3155 3150
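# A minimal usage sketch for origpath() (hypothetical file path and config
# value, for illustration only): with "[ui] origbackuppath = .hg/origbackups"
# set, the backup is written under that directory instead of next to the file.
def _exampleorigpath(ui, repo):
    bakname = origpath(ui, repo, 'subdir/f.txt')
    # without origbackuppath: 'subdir/f.txt.orig'
    # with the setting above: '<repo root>/.hg/origbackups/subdir/f.txt.orig'
    util.copyfile(repo.wjoin('subdir/f.txt'), bakname)
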
3156 3151 def _revertprefetch(repo, ctx, *files):
3157 3152 """Let extension changing the storage layer prefetch content"""
3158 3153 pass
3159 3154
3160 3155 def _performrevert(repo, parents, ctx, actions, interactive=False):
3161 3156 """function that actually perform all the actions computed for revert
3162 3157
3163 3158 This is an independent function to let extensions plug in and react to
3164 3159 the imminent revert.
3165 3160
3166 3161 Make sure you have the working directory locked when calling this function.
3167 3162 """
3168 3163 parent, p2 = parents
3169 3164 node = ctx.node()
3170 3165 def checkout(f):
3171 3166 fc = ctx[f]
3172 3167 repo.wwrite(f, fc.data(), fc.flags())
3173 3168
3174 3169 audit_path = pathutil.pathauditor(repo.root)
3175 3170 for f in actions['forget'][0]:
3176 3171 repo.dirstate.drop(f)
3177 3172 for f in actions['remove'][0]:
3178 3173 audit_path(f)
3179 3174 try:
3180 3175 util.unlinkpath(repo.wjoin(f))
3181 3176 except OSError:
3182 3177 pass
3183 3178 repo.dirstate.remove(f)
3184 3179 for f in actions['drop'][0]:
3185 3180 audit_path(f)
3186 3181 repo.dirstate.remove(f)
3187 3182
3188 3183 normal = None
3189 3184 if node == parent:
3190 3185 # We're reverting to our parent. If possible, we'd like status
3191 3186 # to report the file as clean. We have to use normallookup for
3192 3187 # merges to avoid losing information about merged/dirty files.
3193 3188 if p2 != nullid:
3194 3189 normal = repo.dirstate.normallookup
3195 3190 else:
3196 3191 normal = repo.dirstate.normal
3197 3192
3198 3193 newlyaddedandmodifiedfiles = set()
3199 3194 if interactive:
3200 3195 # Prompt the user for changes to revert
3201 3196 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3202 3197 m = scmutil.match(ctx, torevert, {})
3203 3198 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3204 3199 diffopts.nodates = True
3205 3200 diffopts.git = True
3206 3201 reversehunks = repo.ui.configbool('experimental',
3207 3202 'revertalternateinteractivemode',
3208 3203 True)
3209 3204 if reversehunks:
3210 3205 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3211 3206 else:
3212 3207 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3213 3208 originalchunks = patch.parsepatch(diff)
3214 3209
3215 3210 try:
3216 3211
3217 3212 chunks, opts = recordfilter(repo.ui, originalchunks)
3218 3213 if reversehunks:
3219 3214 chunks = patch.reversehunks(chunks)
3220 3215
3221 3216 except patch.PatchError as err:
3222 3217 raise error.Abort(_('error parsing patch: %s') % err)
3223 3218
3224 3219 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3225 3220 # Apply changes
3226 3221 fp = cStringIO.StringIO()
3227 3222 for c in chunks:
3228 3223 c.write(fp)
3229 3224 dopatch = fp.tell()
3230 3225 fp.seek(0)
3231 3226 if dopatch:
3232 3227 try:
3233 3228 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3234 3229 except patch.PatchError as err:
3235 3230 raise error.Abort(str(err))
3236 3231 del fp
3237 3232 else:
3238 3233 for f in actions['revert'][0]:
3239 3234 checkout(f)
3240 3235 if normal:
3241 3236 normal(f)
3242 3237
3243 3238 for f in actions['add'][0]:
3244 3239 # Don't check out modified files; they have already been created by the diff
3245 3240 if f not in newlyaddedandmodifiedfiles:
3246 3241 checkout(f)
3247 3242 repo.dirstate.add(f)
3248 3243
3249 3244 normal = repo.dirstate.normallookup
3250 3245 if node == parent and p2 == nullid:
3251 3246 normal = repo.dirstate.normal
3252 3247 for f in actions['undelete'][0]:
3253 3248 checkout(f)
3254 3249 normal(f)
3255 3250
3256 3251 copied = copies.pathcopies(repo[parent], ctx)
3257 3252
3258 3253 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3259 3254 if f in copied:
3260 3255 repo.dirstate.copy(copied[f], f)
3261 3256
3262 3257 def command(table):
3263 3258 """Returns a function object to be used as a decorator for making commands.
3264 3259
3265 3260 This function receives a command table as its argument. The table should
3266 3261 be a dict.
3267 3262
3268 3263 The returned function can be used as a decorator for adding commands
3269 3264 to that command table. This function accepts multiple arguments to define
3270 3265 a command.
3271 3266
3272 3267 The first argument is the command name.
3273 3268
3274 3269 The options argument is an iterable of tuples defining command arguments.
3275 3270 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3276 3271
3277 3272 The synopsis argument defines a short, one line summary of how to use the
3278 3273 command. This shows up in the help output.
3279 3274
3280 3275 The norepo argument defines whether the command does not require a
3281 3276 local repository. Most commands operate against a repository, thus the
3282 3277 default is False.
3283 3278
3284 3279 The optionalrepo argument defines whether the command optionally requires
3285 3280 a local repository.
3286 3281
3287 3282 The inferrepo argument defines whether to try to find a repository from the
3288 3283 command line arguments. If True, arguments will be examined for potential
3289 3284 repository locations. See ``findrepo()``. If a repository is found, it
3290 3285 will be used.
3291 3286 """
3292 3287 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3293 3288 inferrepo=False):
3294 3289 def decorator(func):
3295 3290 if synopsis:
3296 3291 table[name] = func, list(options), synopsis
3297 3292 else:
3298 3293 table[name] = func, list(options)
3299 3294
3300 3295 if norepo:
3301 3296 # Avoid import cycle.
3302 3297 import commands
3303 3298 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3304 3299
3305 3300 if optionalrepo:
3306 3301 import commands
3307 3302 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3308 3303
3309 3304 if inferrepo:
3310 3305 import commands
3311 3306 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3312 3307
3313 3308 return func
3314 3309 return decorator
3315 3310
3316 3311 return cmd
3317 3312
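# A minimal sketch of how this factory is typically used (in an extension the
# table and factory would live in the extension module; the command name,
# option, and synopsis below are hypothetical, for illustration only):
_examplecmdtable = {}
_examplecommand = command(_examplecmdtable)

@_examplecommand('hello', [('g', 'greeting', 'hello', _('greeting to use'))],
                 _('hg hello [-g TEXT]'))
def _hello(ui, repo, **opts):
    """print a greeting (hypothetical example command)"""
    ui.write('%s from %s\n' % (opts['greeting'], repo.root))
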
3318 3313 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3319 3314 # commands.outgoing. "missing" is the "missing" attribute of the result of
3320 3315 # "findcommonoutgoing()"
3321 3316 outgoinghooks = util.hooks()
3322 3317
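# A hypothetical hook matching the (ui, repo, otherpeer, opts, missing)
# signature described above (util.hooks.add() is assumed, as used by
# extensions to register hooks):
def _exampleoutgoinghook(ui, repo, otherpeer, opts, missing):
    ui.note('%d changesets not present in the destination\n' % len(missing))

outgoinghooks.add('exampleext', _exampleoutgoinghook)
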
3323 3318 # a list of (ui, repo) functions called by commands.summary
3324 3319 summaryhooks = util.hooks()
3325 3320
3326 3321 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3327 3322 #
3328 3323 # functions should return tuple of booleans below, if 'changes' is None:
3329 3324 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3330 3325 #
3331 3326 # otherwise, 'changes' is a tuple of tuples below:
3332 3327 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3333 3328 # - (desturl, destbranch, destpeer, outgoing)
3334 3329 summaryremotehooks = util.hooks()
3335 3330
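# A hypothetical hook honouring the contract described above: when 'changes'
# is None it only reports which remote queries are needed, otherwise it
# inspects the precomputed incoming/outgoing data.
def _examplesummaryremotehook(ui, repo, opts, changes):
    if changes is None:
        return (True, False)  # need incomings, not outgoings
    (sourceurl, sourcebranch, sourcepeer, incoming), \
        (desturl, destbranch, destpeer, outgoing) = changes
    if incoming:
        ui.note('remote changes available from %s\n' % sourceurl)

summaryremotehooks.add('exampleext', _examplesummaryremotehook)
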
3336 3331 # A list of state files kept by multistep operations like graft.
3337 3332 # Since graft cannot be aborted, it is considered 'clearable' by update.
3338 3333 # note: bisect is intentionally excluded
3339 3334 # (state file, clearable, allowcommit, error, hint)
3340 3335 unfinishedstates = [
3341 3336 ('graftstate', True, False, _('graft in progress'),
3342 3337 _("use 'hg graft --continue' or 'hg update' to abort")),
3343 3338 ('updatestate', True, False, _('last update was interrupted'),
3344 3339 _("use 'hg update' to get a consistent checkout"))
3345 3340 ]
3346 3341
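# A hypothetical entry an extension with its own multistep command could
# append, following the (state file, clearable, allowcommit, error, hint)
# layout documented above:
unfinishedstates.append(
    ('examplestate', True, False, _('example operation in progress'),
     _("use 'hg example --continue' or 'hg update' to abort")))
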
3347 3342 def checkunfinished(repo, commit=False):
3348 3343 '''Look for an unfinished multistep operation, like graft, and abort
3349 3344 if found. It's probably good to check this right before
3350 3345 bailifchanged().
3351 3346 '''
3352 3347 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3353 3348 if commit and allowcommit:
3354 3349 continue
3355 3350 if repo.vfs.exists(f):
3356 3351 raise error.Abort(msg, hint=hint)
3357 3352
3358 3353 def clearunfinished(repo):
3359 3354 '''Check for unfinished operations (as above), and clear the ones
3360 3355 that are clearable.
3361 3356 '''
3362 3357 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3363 3358 if not clearable and repo.vfs.exists(f):
3364 3359 raise error.Abort(msg, hint=hint)
3365 3360 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3366 3361 if clearable and repo.vfs.exists(f):
3367 3362 util.unlink(repo.join(f))
3368 3363
3369 3364 class dirstateguard(object):
3370 3365 '''Restore dirstate at unexpected failure.
3371 3366
3372 3367 At construction time, this class does:
3373 3368
3374 3369 - write current ``repo.dirstate`` out, and
3375 3370 - save ``.hg/dirstate`` into the backup file
3376 3371
3377 3372 ``release()`` restores ``.hg/dirstate`` from the backup file, if it
3378 3373 is invoked before ``close()``.
3379 3374
3380 3375 ``close()`` simply removes the backup file, when invoked before ``release()``.
3381 3376 '''
3382 3377
3383 3378 def __init__(self, repo, name):
3384 3379 self._repo = repo
3385 3380 self._suffix = '.backup.%s.%d' % (name, id(self))
3386 3381 repo.dirstate._savebackup(repo.currenttransaction(), self._suffix)
3387 3382 self._active = True
3388 3383 self._closed = False
3389 3384
3390 3385 def __del__(self):
3391 3386 if self._active: # still active
3392 3387 # this may occur, even if this class is used correctly:
3393 3388 # for example, releasing other resources like a transaction
3394 3389 # may raise an exception before ``dirstateguard.release`` in
3395 3390 # ``release(tr, ....)``.
3396 3391 self._abort()
3397 3392
3398 3393 def close(self):
3399 3394 if not self._active: # already inactivated
3400 3395 msg = (_("can't close already inactivated backup: dirstate%s")
3401 3396 % self._suffix)
3402 3397 raise error.Abort(msg)
3403 3398
3404 3399 self._repo.dirstate._clearbackup(self._repo.currenttransaction(),
3405 3400 self._suffix)
3406 3401 self._active = False
3407 3402 self._closed = True
3408 3403
3409 3404 def _abort(self):
3410 3405 self._repo.dirstate._restorebackup(self._repo.currenttransaction(),
3411 3406 self._suffix)
3412 3407 self._active = False
3413 3408
3414 3409 def release(self):
3415 3410 if not self._closed:
3416 3411 if not self._active: # already inactivated
3417 3412 msg = (_("can't release already inactivated backup:"
3418 3413 " dirstate%s")
3419 3414 % self._suffix)
3420 3415 raise error.Abort(msg)
3421 3416 self._abort()
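
# A minimal sketch of the intended calling pattern (hypothetical caller,
# mirroring the docstring above):
def _exampledirstateuse(repo):
    dsguard = dirstateguard(repo, 'exampleoperation')
    try:
        # ... mutate repo.dirstate here ...
        dsguard.close()      # success: just drop the backup
    finally:
        # if close() was not reached, this restores .hg/dirstate
        dsguard.release()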
@@ -1,224 +1,224 b''
1 1 $ hg init repo
2 2 $ cd repo
3 3 $ touch foo
4 4 $ hg add foo
5 5 $ for i in 0 1 2 3 4 5 6 7 8 9 10 11; do
6 6 > echo "foo-$i" >> foo
7 7 > hg ci -m "foo-$i"
8 8 > done
9 9
10 10 $ for out in "%nof%N" "%%%H" "%b-%R" "%h" "%r" "%m"; do
11 11 > echo
12 12 > echo "# foo-$out.patch"
13 13 > hg export -v -o "foo-$out.patch" 2:tip
14 14 > done
15 15
16 16 # foo-%nof%N.patch
17 17 exporting patches:
18 18 foo-01of10.patch
19 19 foo-02of10.patch
20 20 foo-03of10.patch
21 21 foo-04of10.patch
22 22 foo-05of10.patch
23 23 foo-06of10.patch
24 24 foo-07of10.patch
25 25 foo-08of10.patch
26 26 foo-09of10.patch
27 27 foo-10of10.patch
28 28
29 29 # foo-%%%H.patch
30 30 exporting patches:
31 31 foo-%617188a1c80f869a7b66c85134da88a6fb145f67.patch
32 32 foo-%dd41a5ff707a5225204105611ba49cc5c229d55f.patch
33 33 foo-%f95a5410f8664b6e1490a4af654e4b7d41a7b321.patch
34 34 foo-%4346bcfde53b4d9042489078bcfa9c3e28201db2.patch
35 35 foo-%afda8c3a009cc99449a05ad8aa4655648c4ecd34.patch
36 36 foo-%35284ce2b6b99c9d2ac66268fe99e68e1974e1aa.patch
37 37 foo-%9688c41894e6931305fa7165a37f6568050b4e9b.patch
38 38 foo-%747d3c68f8ec44bb35816bfcd59aeb50b9654c2f.patch
39 39 foo-%5f17a83f5fbd9414006a5e563eab4c8a00729efd.patch
40 40 foo-%f3acbafac161ec68f1598af38f794f28847ca5d3.patch
41 41
42 42 # foo-%b-%R.patch
43 43 exporting patches:
44 44 foo-repo-2.patch
45 45 foo-repo-3.patch
46 46 foo-repo-4.patch
47 47 foo-repo-5.patch
48 48 foo-repo-6.patch
49 49 foo-repo-7.patch
50 50 foo-repo-8.patch
51 51 foo-repo-9.patch
52 52 foo-repo-10.patch
53 53 foo-repo-11.patch
54 54
55 55 # foo-%h.patch
56 56 exporting patches:
57 57 foo-617188a1c80f.patch
58 58 foo-dd41a5ff707a.patch
59 59 foo-f95a5410f866.patch
60 60 foo-4346bcfde53b.patch
61 61 foo-afda8c3a009c.patch
62 62 foo-35284ce2b6b9.patch
63 63 foo-9688c41894e6.patch
64 64 foo-747d3c68f8ec.patch
65 65 foo-5f17a83f5fbd.patch
66 66 foo-f3acbafac161.patch
67 67
68 68 # foo-%r.patch
69 69 exporting patches:
70 70 foo-02.patch
71 71 foo-03.patch
72 72 foo-04.patch
73 73 foo-05.patch
74 74 foo-06.patch
75 75 foo-07.patch
76 76 foo-08.patch
77 77 foo-09.patch
78 78 foo-10.patch
79 79 foo-11.patch
80 80
81 81 # foo-%m.patch
82 82 exporting patches:
83 83 foo-foo_2.patch
84 84 foo-foo_3.patch
85 85 foo-foo_4.patch
86 86 foo-foo_5.patch
87 87 foo-foo_6.patch
88 88 foo-foo_7.patch
89 89 foo-foo_8.patch
90 90 foo-foo_9.patch
91 91 foo-foo_10.patch
92 92 foo-foo_11.patch
93 93
94 94 Doing it again clobbers the files rather than appending:
95 95 $ hg export -v -o "foo-%m.patch" 2:3
96 96 exporting patches:
97 97 foo-foo_2.patch
98 98 foo-foo_3.patch
99 99 $ grep HG foo-foo_2.patch | wc -l
100 100 \s*1 (re)
101 101 $ grep HG foo-foo_3.patch | wc -l
102 102 \s*1 (re)
103 103
104 104 Exporting 4 changesets to a file:
105 105
106 106 $ hg export -o export_internal 1 2 3 4
107 107 $ grep HG export_internal | wc -l
108 108 \s*4 (re)
109 109
110 110 Doing it again clobbers the file rather than appending:
111 111 $ hg export -o export_internal 1 2 3 4
112 112 $ grep HG export_internal | wc -l
113 113 \s*4 (re)
114 114
115 115 Exporting 4 changesets to stdout:
116 116
117 117 $ hg export 1 2 3 4 | grep HG | wc -l
118 118 \s*4 (re)
119 119
120 120 Exporting revision -2 to a file:
121 121
122 122 $ hg export -- -2
123 123 # HG changeset patch
124 124 # User test
125 125 # Date 0 0
126 126 # Thu Jan 01 00:00:00 1970 +0000
127 127 # Node ID 5f17a83f5fbd9414006a5e563eab4c8a00729efd
128 128 # Parent 747d3c68f8ec44bb35816bfcd59aeb50b9654c2f
129 129 foo-10
130 130
131 131 diff -r 747d3c68f8ec -r 5f17a83f5fbd foo
132 132 --- a/foo Thu Jan 01 00:00:00 1970 +0000
133 133 +++ b/foo Thu Jan 01 00:00:00 1970 +0000
134 134 @@ -8,3 +8,4 @@
135 135 foo-7
136 136 foo-8
137 137 foo-9
138 138 +foo-10
139 139
140 140 No filename should be printed if stdout is specified explicitly:
141 141
142 142 $ hg export -v 1 -o -
143 exporting patch:
143 144 # HG changeset patch
144 145 # User test
145 146 # Date 0 0
146 147 # Thu Jan 01 00:00:00 1970 +0000
147 148 # Node ID d1c9656e973cfb5aebd5499bbd2cb350e3b12266
148 149 # Parent 871558de6af2e8c244222f8eea69b782c94ce3df
149 150 foo-1
150 151
151 152 diff -r 871558de6af2 -r d1c9656e973c foo
152 153 --- a/foo Thu Jan 01 00:00:00 1970 +0000
153 154 +++ b/foo Thu Jan 01 00:00:00 1970 +0000
154 155 @@ -1,1 +1,2 @@
155 156 foo-0
156 157 +foo-1
157 exporting patch:
158 158
159 159 Checking if only alphanumeric characters are used in the file name (%m option):
160 160
161 161 $ echo "line" >> foo
162 162 $ hg commit -m " !\"#$%&(,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_\`abcdefghijklmnopqrstuvwxyz{|}~"
163 163 $ hg export -v -o %m.patch tip
164 164 exporting patch:
165 165 ____________0123456789_______ABCDEFGHIJKLMNOPQRSTUVWXYZ______abcdefghijklmnopqrstuvwxyz____.patch
166 166
167 167 Catch exporting unknown revisions (especially empty revsets, see issue3353)
168 168
169 169 $ hg export
170 170 # HG changeset patch
171 171 # User test
172 172 # Date 0 0
173 173 # Thu Jan 01 00:00:00 1970 +0000
174 174 # Node ID 197ecd81a57f760b54f34a58817ad5b04991fa47
175 175 # Parent f3acbafac161ec68f1598af38f794f28847ca5d3
176 176 !"#$%&(,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~
177 177
178 178 diff -r f3acbafac161 -r 197ecd81a57f foo
179 179 --- a/foo Thu Jan 01 00:00:00 1970 +0000
180 180 +++ b/foo Thu Jan 01 00:00:00 1970 +0000
181 181 @@ -10,3 +10,4 @@
182 182 foo-9
183 183 foo-10
184 184 foo-11
185 185 +line
186 186
187 187 $ hg export ""
188 188 hg: parse error: empty query
189 189 [255]
190 190 $ hg export 999
191 191 abort: unknown revision '999'!
192 192 [255]
193 193 $ hg export "not all()"
194 194 abort: export requires at least one changeset
195 195 [255]
196 196
197 197 Check for color output
198 198 $ cat <<EOF >> $HGRCPATH
199 199 > [color]
200 200 > mode = ansi
201 201 > [extensions]
202 202 > color =
203 203 > EOF
204 204
205 205 $ hg export --color always --nodates tip
206 206 # HG changeset patch
207 207 # User test
208 208 # Date 0 0
209 209 # Thu Jan 01 00:00:00 1970 +0000
210 210 # Node ID * (glob)
211 211 # Parent * (glob)
212 212 !"#$%&(,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~
213 213
214 214 \x1b[0;1mdiff -r f3acbafac161 -r 197ecd81a57f foo\x1b[0m (esc)
215 215 \x1b[0;31;1m--- a/foo\x1b[0m (esc)
216 216 \x1b[0;32;1m+++ b/foo\x1b[0m (esc)
217 217 \x1b[0;35m@@ -10,3 +10,4 @@\x1b[0m (esc)
218 218 foo-9
219 219 foo-10
220 220 foo-11
221 221 \x1b[0;32m+line\x1b[0m (esc)
222 222
223 223
224 224 $ cd ..