##// END OF EJS Templates
revert: add an experimental config to use inverted selection...
Laurent Charignon -
r25424:69609f43 default
parent child Browse files
Show More
@@ -1,3352 +1,3363 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO, shutil
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import context, repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import crecord as crecordmod
18 18 import lock as lockmod
19 19
def ishunk(x):
    """Return True if x is a hunk object (curses or plain record flavor)."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
23 23
def newandmodified(chunks, originalchunks):
    """Return the set of filenames whose hunks introduce a new file and
    were not present in the original (unfiltered) chunk list."""
    return set(chunk.header.filename()
               for chunk in chunks
               if (ishunk(chunk) and chunk.header.isnewfile()
                   and chunk not in originalchunks))
31 31
def parsealiases(cmd):
    """Split a command table key into its list of alias names.

    A leading '^' (the "show in short help" marker) is stripped first.
    """
    stripped = cmd.lstrip("^")
    return stripped.split("|")
34 34
def setupwrapcolorwrite(ui):
    """Replace ui.write with a label-aware wrapper and return the original
    write method so the caller can restore it afterwards."""
    oldwrite = ui.write

    def labeledwrite(*args, **kwargs):
        # route every chunk through patch.difflabel so diff output can be
        # labeled/colorized
        baselabel = kwargs.pop('label', '')
        for chunk, extralabel in patch.difflabel(lambda: args):
            oldwrite(chunk, label=baselabel + extralabel)

    setattr(ui, 'write', labeledwrite)
    return oldwrite
47 47
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Let the user filter originalhunks, via curses when requested."""
    if not usecurses:
        return patch.filterpatch(ui, originalhunks, operation)

    # curses path: use the interactive chunk selector, wrapped by the test
    # decorator when a scripted test file is supplied
    if testfile:
        recordfn = crecordmod.testdecorator(testfile,
                                            crecordmod.testchunkselector)
    else:
        recordfn = crecordmod.chunkselector
    return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
60 60
def recordfilter(ui, originalhunks, operation=None):
    """Prompt the user to filter the originalhunks and return a list of
    selected hunks.

    *operation* is used for ui purposes to indicate the user
    what kind of filtering they are doing: reverting, commiting, shelving, etc.
    *operation* has to be a translated string.
    """
    usecurses = ui.configbool('experimental', 'crecord', False)
    testfile = ui.config('experimental', 'crecordtest', None)
    # label/colorize diff output while the interactive filter is running
    oldwrite = setupwrapcolorwrite(ui)
    try:
        return filterchunks(ui, originalhunks, usecurses, testfile,
                            operation)
    finally:
        ui.write = oldwrite
77 77
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Interactively select changes and commit them via 'commitfunc'.

    'cmdsuggest' is the command name suggested when the ui is not
    interactive.  When 'backupall' is true every changed file is backed
    up, not only files whose working copy must be partially reverted.
    'filterfn' is the hunk-selection function (e.g. recordfilter).
    """
    import merge as mergemod

    if not ui.interactive():
        raise util.Abort(_('running non-interactively, use %s instead') %
                         cmdsuggest)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        merge = len(repo[None].parents()) > 1
        if merge:
            raise util.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        # generate a git-style diff of the working directory so the hunk
        # selector has something to present
        status = repo.status(match=match)
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, so we have intending-to apply subset of it
        try:
            chunks = filterfn(ui, originalchunks)
        except patch.PatchError, err:
            raise util.Abort(_('error parsing patch: %s') % err)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                # headers expose files(); bare hunks do not
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                    newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError, err:
                # directory left over from a previous run is fine
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                shutil.copystat(repo.wjoin(f), tmpname)
                backups[f] = tmpname

            # serialize only the selected hunks of backed-up files into a
            # patch to be applied on a clean working copy
            fp = cStringIO.StringIO()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                # Equivalent to hg.revert
                choices = lambda key: key in backups
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, choices)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError, err:
                    raise util.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files
            try:
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))
                    util.copyfile(tmpname, repo.wjoin(realname))
                    # Our calls to copystat() here and above are a
                    # hack to trick any editors that have f open that
                    # we haven't modified them.
                    #
                    # Also note that this racy as an editor could
                    # notice the file's mtime before we've finished
                    # writing it.
                    shutil.copystat(tmpname, repo.wjoin(realname))
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best-effort cleanup; leftover backups are harmless
                pass

    return commit(ui, repo, recordfunc, pats, opts)
223 223
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    normal = {}
    debug = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for entry in keys:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)
        if cmd in aliases:
            found = cmd
        else:
            found = None
            if not strict:
                for alias in aliases:
                    if alias.startswith(cmd):
                        found = alias
                        break
        if found is None:
            continue
        # debug commands go into their own bucket so they only surface
        # when nothing else matched
        isdebug = (aliases[0].startswith("debug")
                   or found.startswith("debug"))
        bucket = debug if isdebug else normal
        bucket[found] = (aliases, table[entry])

    return normal or debug, allcmds
261 261
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    matches, allcmds = findpossible(cmd, table, strict)

    # an exact match always wins over any number of prefix matches
    if cmd in matches:
        return matches[cmd]

    if len(matches) > 1:
        raise error.AmbiguousCommand(cmd, sorted(matches.keys()))

    for entry in matches.values():
        # exactly one prefix match
        return entry

    raise error.UnknownCommand(cmd, allcmds)
278 278
def findrepo(p):
    """Walk upwards from directory 'p' looking for a '.hg' directory.

    Returns the repository root, or None when no ancestor of 'p'
    contains one.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # reached the filesystem root without finding a repository
            return None
        p = parent

    return p
286 286
def bailifchanged(repo, merge=True):
    """Abort when the working directory has uncommitted changes.

    When 'merge' is true, an outstanding uncommitted merge also aborts.
    Subrepositories are checked recursively.
    """
    if merge and repo.dirstate.p2() != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    # modified, added, removed, deleted
    if any(repo.status()[:4]):
        raise util.Abort(_('uncommitted changes'))
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        wctx.sub(subpath).bailifchanged()
296 296
def logmessage(ui, opts):
    """Get the log message according to the -m and -l options."""
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if logfile and not message:
        try:
            if logfile == '-':
                # read the message from stdin
                message = ui.fin.read()
            else:
                lines = util.readfile(logfile).splitlines()
                message = '\n'.join(lines)
        except IOError as inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message
315 315
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # a changectx: merging when it has more than one parent
        ismerge = len(ctxorbool.parents()) > 1

    suffix = ".merge" if ismerge else ".normal"
    return baseformname + suffix
332 332
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is a extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        def forceeditor(r, c, s):
            return commitforceeditor(r, c, s, finishdesc=finishdesc,
                                     extramsg=extramsg, editform=editform)
        return forceeditor
    if editform:
        def formeditor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return formeditor
    return commiteditor
363 363
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if not limit:
        # no limit requested (absent, None, '' or 0)
        return None
    try:
        limit = int(limit)
    except ValueError:
        raise util.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise util.Abort(_('limit must be positive'))
    return limit
377 377
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand %-escapes in the output filename pattern 'pat'.

    Always available: '%%' (literal percent) and '%b' (repo basename).
    Additional escapes become available when the matching argument is
    supplied: %H/%h/%R/%r/%m from 'node', %N from 'total', %n from
    'seqno' (zero-padded when 'total' is also given), and %s/%d/%p from
    'pathname'.  An unknown escape aborts.
    """
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }

    try:
        if node:
            expander['H'] = lambda: hex(node)
            expander['R'] = lambda: str(repo.changelog.rev(node))
            expander['h'] = lambda: short(node)
            expander['m'] = lambda: re.sub('[^\w]', '_', str(desc))
            expander['r'] = (lambda:
                str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # pad the sequence number to the width of the total count
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        out = []
        i = 0
        end = len(pat)
        while i < end:
            ch = pat[i]
            if ch == '%':
                i += 1
                ch = expander[pat[i]]()
            out.append(ch)
            i += 1
        return ''.join(out)
    except KeyError as inst:
        raise util.Abort(_("invalid format spec '%%%s' in output filename") %
                         inst.args[0])
423 423
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Return a file-like object for the output/input pattern 'pat'.

    An empty pat or '-' maps to the ui's stdout (when writing) or stdin
    (when reading); a pat that is already a file-like object with the
    right capability is returned as-is; anything else is expanded via
    makefilename() and opened with 'mode'.  'modemap' lets repeated
    writes to the same expanded filename switch from truncate to append.
    """

    writable = mode not in ('r', 'rb')

    if not pat or pat == '-':
        if writable:
            fp = repo.ui.fout
        else:
            fp = repo.ui.fin
        if util.safehasattr(fp, 'fileno'):
            # duplicate the descriptor so closing the result does not
            # close the ui's stream
            return os.fdopen(os.dup(fp.fileno()), mode)
        else:
            # if this fp can't be duped properly, return
            # a dummy object that can be closed
            class wrappedfileobj(object):
                noop = lambda x: None
                def __init__(self, f):
                    self.f = f
                def __getattr__(self, attr):
                    if attr == 'close':
                        return self.noop
                    else:
                        return getattr(self.f, attr)

            return wrappedfileobj(fp)
    if util.safehasattr(pat, 'write') and writable:
        return pat
    if util.safehasattr(pat, 'read') and 'r' in mode:
        return pat
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # subsequent writers to the same file should append
            modemap[fn] = 'ab'
    return open(fn, mode)
461 461
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    # option flags are mutually exclusive; build an error message for any
    # invalid combination before touching the repository
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise util.Abort(msg)

    r = None
    if repo:
        if cl:
            # use the unfiltered changelog so hidden revisions are visible
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise util.Abort(_("--dir can only be used on repos with "
                                  "treemanifest enabled"))
            dirlog = repo.dirlog(file_)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifest
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        # no repository (or empty log): fall back to opening a raw revlog
        # file directly from the filesystem
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise util.Abort(_("revlog '%s' not found") % file_)
        r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
506 506
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, move) files matching 'pats' to the
    destination given as the last pattern, recording the copies in the
    dirstate.

    Returns True when at least one copy failed.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # expand one source pattern into (abs, rel, exact) tuples,
        # warning about unmanaged or removed files named exactly
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(repo[None], [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # perform one copy/move; returns True on failure, None otherwise
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                # case-only rename on a case-insensitive filesystem
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            # --after: only record the copy, the file must already exist
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # rename through a temporary name so the filesystem
                    # sees the case change
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                util.unlinkpath(repo.wjoin(abssrc))
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many candidate targets exist for a given
                    # strip length; the better-scoring length wins
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    # the last pattern is the destination; the rest are sources
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise util.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise util.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
734 734
def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
            runargs=None, appendpid=False):
    '''Run a command as a service.

    When --daemon is set the process re-executes itself detached
    (passing --daemon-pipefds to the child) and the parent waits for
    the child to signal successful startup by removing a lock file.
    'initfn' runs before daemonizing side effects, 'runfn' is the
    service body, and 'parentfn' (if given) runs in the parent with the
    child's pid.
    '''

    def writepid(pid):
        # record our pid if --pid-file was requested; append or truncate
        # depending on 'appendpid'
        if opts['pid_file']:
            if appendpid:
                mode = 'a'
            else:
                mode = 'w'
            fp = open(opts['pid_file'], mode)
            fp.write(str(pid) + '\n')
            fp.close()

    if opts['daemon'] and not opts['daemon_pipefds']:
        # parent side of daemonization
        # Signal child process startup with file removal
        lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
        os.close(lockfd)
        try:
            if not runargs:
                runargs = util.hgcmd() + sys.argv[1:]
            runargs.append('--daemon-pipefds=%s' % lockpath)
            # Don't pass --cwd to the child process, because we've already
            # changed directory.
            for i in xrange(1, len(runargs)):
                if runargs[i].startswith('--cwd='):
                    del runargs[i]
                    break
                elif runargs[i].startswith('--cwd'):
                    del runargs[i:i + 2]
                    break
            def condfn():
                return not os.path.exists(lockpath)
            pid = util.rundetached(runargs, condfn)
            if pid < 0:
                raise util.Abort(_('child process failed to start'))
            writepid(pid)
        finally:
            try:
                os.unlink(lockpath)
            except OSError, e:
                # the child may already have removed it to signal startup
                if e.errno != errno.ENOENT:
                    raise
        if parentfn:
            return parentfn(pid)
        else:
            return

    if initfn:
        initfn()

    if not opts['daemon']:
        writepid(os.getpid())

    if opts['daemon_pipefds']:
        # child side of daemonization: detach from the terminal and
        # redirect stdio to the log file (or /dev/null)
        lockpath = opts['daemon_pipefds']
        try:
            os.setsid()
        except AttributeError:
            # platform without setsid (e.g. Windows)
            pass
        # removing the lock file tells the parent we started successfully
        os.unlink(lockpath)
        util.hidewindow()
        sys.stdout.flush()
        sys.stderr.flush()

        nullfd = os.open(os.devnull, os.O_RDWR)
        logfilefd = nullfd
        if logfile:
            logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
        os.dup2(nullfd, 0)
        os.dup2(logfilefd, 1)
        os.dup2(logfilefd, 2)
        if nullfd not in (0, 1, 2):
            os.close(nullfd)
        if logfile and logfilefd not in (0, 1, 2):
            os.close(logfilefd)

    if runfn:
        return runfn()
814 814
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it a simple code translation from
    the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)
    """
    # split the patch into metadata (user, date, parents, ...) and a
    # temporary file holding the diff itself
    tmpname, message, user, date, branch, nodeid, p1, p2 = \
        patch.extract(ui, hunk)

    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)
    if not tmpname:
        # no diff content in this hunk: nothing to import
        return (None, None, False)
    msg = _('applied to working directory')

    rejects = False
    dsguard = None

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            if not nodeid or not p1:
                raise util.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            # apply to the working directory, guarded so the dirstate is
            # rolled back if anything below fails
            dsguard = dirstateguard(repo, 'tryimportone')
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or opts.get('import_branch'):
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except patch.PatchError, e:
                if not partial:
                    raise util.Abort(str(e))
                if partial:
                    # with --partial, rejected hunks are reported instead
                    # of aborting
                    rejects = True

            files = list(files)
            if opts.get('no_commit'):
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform, **opts)
                allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
                try:
                    if partial:
                        # a fully-rejected partial import still creates a
                        # (possibly empty) commit
                        repo.ui.setconfig('ui', 'allowemptycommit', True)
                    n = repo.commit(message, opts.get('user') or user,
                                    opts.get('date') or date, match=m,
                                    editor=editor)
                finally:
                    repo.ui.restoreconfig(allowemptyback)
                dsguard.close()
        else:
            # --bypass: build the commit in memory without touching the
            # working directory
            if opts.get('exact') or opts.get('import_branch'):
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except patch.PatchError, e:
                    raise util.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.makememctx(repo, (p1.node(), p2.node()),
                                            message,
                                            opts.get('user') or user,
                                            opts.get('date') or date,
                                            branch, files, store,
                                            editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and opts.get('no_commit'):
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise util.Abort(_('patch is damaged or loses information'))
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        lockmod.release(dsguard)
        os.unlink(tmpname)
972 972
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.

    repo -- repository the changesets live in
    revs -- non-empty list of revisions to export
    template -- filename template used to open one output file per
        changeset; when empty (and fp is None) output goes to ui.write
    fp -- optional file object receiving all patches; takes precedence
        over template
    switch_parent -- diff against the second parent instead of the first
    opts -- diff options forwarded to patch.diffui
    '''

    total = len(revs)
    # width of the widest revision number, used when expanding the template
    revwidth = max([len(str(rev)) for rev in revs])
    # mode map shared by all makefileobj calls within this export run
    filemode = {}

    def single(rev, seqno, fp):
        # write a single changeset: patch header, description, then the diff
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()

        if parents:
            prev = parents[0]
        else:
            prev = nullid

        shouldclose = False
        if not fp and len(template) > 0:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fp = makefileobj(repo, template, node, desc=desc, total=total,
                             seqno=seqno, revwidth=revwidth, mode='wb',
                             modemap=filemode)
            if fp != template:
                # we opened a real file for this changeset; close it below
                shouldclose = True
        if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        if not fp:
            write = repo.ui.write
        else:
            def write(s, **kw):
                # plain file objects take no label keywords; drop them
                fp.write(s)

        write("# HG changeset patch\n")
        write("# User %s\n" % ctx.user())
        write("# Date %d %d\n" % ctx.date())
        write("# %s\n" % util.datestr(ctx.date()))
        if branch and branch != 'default':
            write("# Branch %s\n" % branch)
        write("# Node ID %s\n" % hex(node))
        write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            write("# Parent %s\n" % hex(parents[1]))
        write(ctx.description().rstrip())
        write("\n\n")

        for chunk, label in patch.diffui(repo, prev, node, opts=opts):
            write(chunk, label=label)

        if shouldclose:
            fp.close()

    for seqno, rev in enumerate(revs):
        # seqno is 1-based when expanded into the filename template
        single(rev, seqno + 1, fp)
1033 1033
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False):
    '''show diff or diffstat.

    Writes the diff between node1 and node2 (restricted by match) to fp,
    or to the ui when fp is None.  With stat=True a diffstat is rendered
    instead of the full diff.  root, if given, restricts output to paths
    under that directory; listsubrepos recurses into subrepositories.
    '''
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            # drop label keywords; plain file objects cannot use them
            fp.write(s)

    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        # diffstat needs no context lines; drop them for speed
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix, relroot=relroot)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width,
                                             git=diffopts.git):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix,
                                         relroot=relroot):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.narrowmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1091 1091
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        # rev -> rendered text, populated only when buffering output
        self.header = {}
        self.hunk = {}
        self.lastheader = None
        self.footer = None

    def flush(self, rev):
        '''write buffered output for rev; return 1 if a hunk was written'''
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                # only emit a header when it differs from the previous one
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        '''render ctx, buffering the labeled output per-rev when requested'''
        if self.buffered:
            self.ui.pushbuffer()
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()
        if self.ui.debugflag:
            hexfunc = hex
        else:
            hexfunc = short
        if rev is None:
            # working-directory context: display as first parent plus '+'
            pctx = ctx.p1()
            revnode = (pctx.rev(), hexfunc(pctx.node()) + '+')
        else:
            revnode = (rev, hexfunc(changenode))

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % revnode, label='log.node')
            return

        date = util.datestr(ctx.date())

        # i18n: column positioning for "hg log"
        self.ui.write(_("changeset: %d:%s\n") % revnode,
                      label='log.changeset changeset.%s' % ctx.phasestr())

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            # i18n: column positioning for "hg log"
            self.ui.write(_("branch: %s\n") % branch,
                          label='log.branch')

        for name, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if name == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            # i18n: column positioning for "hg log"
            self.ui.write(_("phase: %s\n") % ctx.phasestr(),
                          label='log.phase')
        for pctx in self._meaningful_parentrevs(ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            # i18n: column positioning for "hg log"
            self.ui.write(_("parent: %d:%s\n")
                          % (pctx.rev(), hexfunc(pctx.node())),
                          label=label)

        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            # i18n: column positioning for "hg log"
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifest.rev(mnode), hex(mnode)),
                          label='ui.debug log.manifest')
        # i18n: column positioning for "hg log"
        self.ui.write(_("user: %s\n") % ctx.user(),
                      label='log.user')
        # i18n: column positioning for "hg log"
        self.ui.write(_("date: %s\n") % date,
                      label='log.date')

        if self.ui.debugflag:
            # modified, added, removed lists from status against p1
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip([# i18n: column positioning for "hg log"
                                   _("files:"),
                                   # i18n: column positioning for "hg log"
                                   _("files+:"),
                                   # i18n: column positioning for "hg log"
                                   _("files-:")], files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            # i18n: column positioning for "hg log"
            self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            # i18n: column positioning for "hg log"
            self.ui.write(_("copies: %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                # i18n: column positioning for "hg log"
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # i18n: column positioning for "hg log"
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(changenode, matchfn)

    def showpatch(self, node, matchfn):
        '''write the diffstat and/or diff for node per self.diffopts'''
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            prev = self.repo.changelog.parents(node)[0]
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                if stat:
                    # blank line separating the diffstat from the diff
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
            self.ui.write("\n")

    def _meaningful_parentrevs(self, ctx):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = ctx.parents()
        if len(parents) > 1:
            return parents
        if self.ui.debugflag:
            return [parents[0], self.repo['null']]
        if parents[0].rev() >= scmutil.intrev(self.repo, ctx.rev()) - 1:
            return []
        return parents
1277 1277
class jsonchangeset(changeset_printer):
    '''format changeset information.'''
    # Emits one JSON array of objects, one object per changeset, built by
    # hand with string writes; close() terminates the array.

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        # tracks whether the opening '[' has been written yet
        self._first = True

    def close(self):
        '''terminate the JSON array ("[]" when nothing was shown)'''
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        if rev is None:
            # working-directory context has no rev/node: emit JSON null
            jrev = jnode = 'null'
        else:
            jrev = str(rev)
            jnode = '"%s"' % hex(ctx.node())
        j = encoding.jsonescape

        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            # subsequent objects are comma-separated from the previous one
            self.ui.write(",\n {")

        if self.ui.quiet:
            self.ui.write('\n "rev": %s' % jrev)
            self.ui.write(',\n "node": %s' % jnode)
            self.ui.write('\n }')
            return

        self.ui.write('\n "rev": %s' % jrev)
        self.ui.write(',\n "node": %s' % jnode)
        self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
        self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
        self.ui.write(',\n "user": "%s"' % j(ctx.user()))
        self.ui.write(',\n "date": [%d, %d]' % ctx.date())
        self.ui.write(',\n "desc": "%s"' % j(ctx.description()))

        self.ui.write(',\n "bookmarks": [%s]' %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write(',\n "tags": [%s]' %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write(',\n "parents": [%s]' %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            self.ui.write(',\n "manifest": %s' % jmanifestnode)

            self.ui.write(',\n "extra": {%s}' %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            # status against p1: (modified, added, removed, ...)
            files = ctx.p1().status(ctx)
            self.ui.write(',\n "modified": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write(',\n "added": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write(',\n "removed": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            self.ui.write(',\n "files": [%s]' %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                self.ui.write(',\n "copies": {%s}' %
                              ", ".join('"%s": "%s"' % (j(k), j(v))
                                        for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                # capture the diffstat output so it can be JSON-escaped
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))

        self.ui.write("\n }")
1375 1375
class changeset_templater(changeset_printer):
    '''format changeset information.'''
    # Renders changesets through a user-supplied template string (tmpl) or
    # a style map file (mapfile).

    def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        # full nodes in debug mode, short 12-character form otherwise
        formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
        defaulttempl = {
            'parent': '{rev}:{node|formatnode} ',
            'manifest': '{rev}:{node|formatnode}',
            'file_copy': '{name} ({source})',
            'extra': '{key}={value|stringescape}'
            }
        # filecopy is preserved for compatibility reasons
        defaulttempl['filecopy'] = defaulttempl['file_copy']
        self.t = templater.templater(mapfile, {'formatnode': formatnode},
                                     cache=defaulttempl)
        if tmpl:
            # a literal template overrides the mapfile's 'changeset' entry
            self.t.cache['changeset'] = tmpl

        self.cache = {}

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''

        showlist = templatekw.showlist

        # showparents() behaviour depends on ui trace level which
        # causes unexpected behaviours at templating level and makes
        # it harder to extract it in a standalone function. Its
        # behaviour cannot be changed so leave it here for now.
        def showparents(**args):
            ctx = args['ctx']
            parents = [[('rev', p.rev()),
                        ('node', p.hex()),
                        ('phase', p.phasestr())]
                       for p in self._meaningful_parentrevs(ctx)]
            return showlist('parent', parents, **args)

        props = props.copy()
        props.update(templatekw.keywords)
        props['parents'] = showparents
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache

        # find correct templates for current mode
        # later entries win, so mode-specific templates (e.g. changeset_debug)
        # override the plain ones when that mode is active

        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        types = {'header': '', 'footer':'', 'changeset': 'changeset'}
        for mode, postfix in tmplmodes:
            for type in types:
                cur = postfix and ('%s_%s' % (type, postfix)) or type
                if mode and cur in self.t:
                    types[type] = cur

        try:

            # write header
            if types['header']:
                h = templater.stringify(self.t(types['header'], **props))
                if self.buffered:
                    self.header[ctx.rev()] = h
                else:
                    if self.lastheader != h:
                        self.lastheader = h
                        self.ui.write(h)

            # write changeset metadata, then patch if requested
            key = types['changeset']
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(ctx.node(), matchfn)

            if types['footer']:
                if not self.footer:
                    # footer is rendered once and written by close()
                    self.footer = templater.stringify(self.t(types['footer'],
                                                             **props))

        except KeyError, inst:
            msg = _("%s: no key named '%s'")
            raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
        except SyntaxError, inst:
            raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1466 1466
def gettemplate(ui, tmpl, style):
    """
    Find the template matching the given template spec or style.

    Returns a (tmpl, mapfile) pair: exactly one of the two is set when a
    template or style resolves, and (None, None) when neither is given.
    The checks below run in precedence order, so their sequence matters.
    """

    # ui settings
    if not tmpl and not style: # template are stronger than style
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            try:
                tmpl = templater.unquotestring(tmpl)
            except SyntaxError:
                # keep the raw configured value when unquoting fails
                pass
            return tmpl, None
        else:
            style = util.expandpath(ui.config('ui', 'style', ''))

    if not tmpl and style:
        mapfile = style
        if not os.path.split(mapfile)[0]:
            # bare name: look it up among the stock style map files
            mapname = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if mapname:
                mapfile = mapname
        return None, mapfile

    if not tmpl:
        return None, None

    # looks like a literal template?
    if '{' in tmpl:
        return tmpl, None

    # perhaps a stock style?
    if not os.path.split(tmpl)[0]:
        mapname = (templater.templatepath('map-cmdline.' + tmpl)
                   or templater.templatepath(tmpl))
        if mapname and os.path.isfile(mapname):
            return None, mapname

    # perhaps it's a reference to [templates]
    t = ui.config('templates', tmpl)
    if t:
        try:
            tmpl = templater.unquotestring(t)
        except SyntaxError:
            tmpl = t
        return tmpl, None

    if tmpl == 'list':
        ui.write(_("available styles: %s\n") % templater.stylelist())
        raise util.Abort(_("specify a template"))

    # perhaps it's a path to a map or a template
    if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
        # is it a mapfile for a style?
        if os.path.basename(tmpl).startswith("map-"):
            return None, os.path.realpath(tmpl)
        tmpl = open(tmpl).read()
        return tmpl, None

    # constant string?
    return tmpl, None
1530 1530
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # options
    matchfn = None
    if opts.get('patch') or opts.get('stat'):
        # a matcher is needed so the printer can render diffs/diffstats
        matchfn = scmutil.matchall(repo)

    if opts.get('template') == 'json':
        # 'json' is a built-in formatter, not a template
        return jsonchangeset(ui, repo, matchfn, opts, buffered)

    tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))

    if not tmpl and not mapfile:
        return changeset_printer(ui, repo, matchfn, opts, buffered)

    try:
        t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
                                buffered)
    except SyntaxError, inst:
        raise util.Abort(inst.args[0])
    return t
1561 1561
def showmarker(ui, marker):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    # precursor node first, then each successor separated by a space
    ui.write(hex(marker.precnode()))
    for successor in marker.succnodes():
        ui.write(' %s' % hex(successor))
    ui.write(' %X ' % marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
    ui.write('(%s) ' % util.datestr(marker.date()))
    # metadata, sorted by key, with the date entry filtered out
    metadata = ', '.join('%r: %r' % item
                         for item in sorted(marker.metadata().items())
                         if item[0] != 'date')
    ui.write('{%s}\n' % metadata)
1579 1579
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    datematch = util.matchdate(date)
    matcher = scmutil.matchall(repo)
    matched = {}

    def prep(ctx, fns):
        # remember the date of every changeset satisfying the date spec
        when = ctx.date()
        if datematch(when[0]):
            matched[ctx.rev()] = when

    for ctx in walkchangerevs(repo, matcher, {'rev': None}, prep):
        rev = ctx.rev()
        if rev not in matched:
            continue
        ui.status(_("found revision %s from %s\n") %
                  (rev, util.datestr(matched[rev])))
        return str(rev)

    raise util.Abort(_("revision matching date not found"))
1600 1600
def increasingwindows(windowsize=8, sizelimit=512):
    """Generate window sizes, doubling after each yield.

    Yields ``windowsize`` first, doubles while the current size is still
    below ``sizelimit``, then repeats the final size forever.
    """
    size = windowsize
    while size < sizelimit:
        yield size
        size *= 2
    while True:
        yield size
1606 1606
class FileWalkError(Exception):
    """Raised when the file history cannot be walked via filelogs alone."""
1609 1609
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    As a side effect, fncache is filled with rev -> [filenames] entries,
    and renamed sources are chased when ``follow`` is set.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    # (filename, filenode) pairs discovered through copies/renames
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns.  Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yield (filename, filenode-or-None), then any copy sources found
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise util.Abort(_('cannot follow file not in parent '
                                       'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise util.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = set([filelog.linkrev(last)])

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
1706 1706
class _followfilter(object):
    '''incrementally decide whether revisions belong to a followed line.

    match() must be fed revisions monotonically moving away from the first
    revision it saw (startrev); roots accumulates the frontier of the
    followed ancestor/descendant set as the walk proceeds.
    '''
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        # follow only the first parent of merges when set
        self.onlyfirst = onlyfirst

    def match(self, rev):
        '''return True if rev is on the followed line from startrev'''
        def realparents(rev):
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            # first revision seen anchors the walk
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                # replace rev by its parents in the frontier
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
1744 1744
1745 1745 def walkchangerevs(repo, match, opts, prepare):
1746 1746 '''Iterate over files and the revs in which they changed.
1747 1747
1748 1748 Callers most commonly need to iterate backwards over the history
1749 1749 in which they are interested. Doing so has awful (quadratic-looking)
1750 1750 performance, so we use iterators in a "windowed" way.
1751 1751
1752 1752 We walk a window of revisions in the desired order. Within the
1753 1753 window, we first walk forwards to gather data, then in the desired
1754 1754 order (usually backwards) to display it.
1755 1755
1756 1756 This function returns an iterator yielding contexts. Before
1757 1757 yielding each context, the iterator will first call the prepare
1758 1758 function on each context in the window in forward order.'''
1759 1759
1760 1760 follow = opts.get('follow') or opts.get('follow_first')
1761 1761 revs = _logrevs(repo, opts)
1762 1762 if not revs:
1763 1763 return []
1764 1764 wanted = set()
1765 1765 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1766 1766 opts.get('removed'))
1767 1767 fncache = {}
1768 1768 change = repo.changectx
1769 1769
1770 1770 # First step is to fill wanted, the set of revisions that we want to yield.
1771 1771 # When it does not induce extra cost, we also fill fncache for revisions in
1772 1772 # wanted: a cache of filenames that were changed (ctx.files()) and that
1773 1773 # match the file filtering conditions.
1774 1774
1775 1775 if match.always():
1776 1776 # No files, no patterns. Display all revs.
1777 1777 wanted = revs
1778 1778 elif not slowpath:
1779 1779 # We only have to read through the filelog to find wanted revisions
1780 1780
1781 1781 try:
1782 1782 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1783 1783 except FileWalkError:
1784 1784 slowpath = True
1785 1785
1786 1786 # We decided to fall back to the slowpath because at least one
1787 1787 # of the paths was not a file. Check to see if at least one of them
1788 1788 # existed in history, otherwise simply return
1789 1789 for path in match.files():
1790 1790 if path == '.' or path in repo.store:
1791 1791 break
1792 1792 else:
1793 1793 return []
1794 1794
1795 1795 if slowpath:
1796 1796 # We have to read the changelog to match filenames against
1797 1797 # changed files
1798 1798
1799 1799 if follow:
1800 1800 raise util.Abort(_('can only follow copies/renames for explicit '
1801 1801 'filenames'))
1802 1802
1803 1803 # The slow path checks files modified in every changeset.
1804 1804 # This is really slow on large repos, so compute the set lazily.
1805 1805 class lazywantedset(object):
1806 1806 def __init__(self):
1807 1807 self.set = set()
1808 1808 self.revs = set(revs)
1809 1809
1810 1810 # No need to worry about locality here because it will be accessed
1811 1811 # in the same order as the increasing window below.
1812 1812 def __contains__(self, value):
1813 1813 if value in self.set:
1814 1814 return True
1815 1815 elif not value in self.revs:
1816 1816 return False
1817 1817 else:
1818 1818 self.revs.discard(value)
1819 1819 ctx = change(value)
1820 1820 matches = filter(match, ctx.files())
1821 1821 if matches:
1822 1822 fncache[value] = matches
1823 1823 self.set.add(value)
1824 1824 return True
1825 1825 return False
1826 1826
1827 1827 def discard(self, value):
1828 1828 self.revs.discard(value)
1829 1829 self.set.discard(value)
1830 1830
1831 1831 wanted = lazywantedset()
1832 1832
1833 1833 # it might be worthwhile to do this in the iterator if the rev range
1834 1834 # is descending and the prune args are all within that range
1835 1835 for rev in opts.get('prune', ()):
1836 1836 rev = repo[rev].rev()
1837 1837 ff = _followfilter(repo)
1838 1838 stop = min(revs[0], revs[-1])
1839 1839 for x in xrange(rev, stop - 1, -1):
1840 1840 if ff.match(x):
1841 1841 wanted = wanted - [x]
1842 1842
1843 1843 # Now that wanted is correctly initialized, we can iterate over the
1844 1844 # revision range, yielding only revisions in wanted.
1845 1845 def iterate():
1846 1846 if follow and match.always():
1847 1847 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1848 1848 def want(rev):
1849 1849 return ff.match(rev) and rev in wanted
1850 1850 else:
1851 1851 def want(rev):
1852 1852 return rev in wanted
1853 1853
1854 1854 it = iter(revs)
1855 1855 stopiteration = False
1856 1856 for windowsize in increasingwindows():
1857 1857 nrevs = []
1858 1858 for i in xrange(windowsize):
1859 1859 rev = next(it, None)
1860 1860 if rev is None:
1861 1861 stopiteration = True
1862 1862 break
1863 1863 elif want(rev):
1864 1864 nrevs.append(rev)
1865 1865 for rev in sorted(nrevs):
1866 1866 fns = fncache.get(rev)
1867 1867 ctx = change(rev)
1868 1868 if not fns:
1869 1869 def fns_generator():
1870 1870 for f in ctx.files():
1871 1871 if match(f):
1872 1872 yield f
1873 1873 fns = fns_generator()
1874 1874 prepare(ctx, fns)
1875 1875 for rev in nrevs:
1876 1876 yield change(rev)
1877 1877
1878 1878 if stopiteration:
1879 1879 break
1880 1880
1881 1881 return iterate()
1882 1882
def _makefollowlogfilematcher(repo, files, followfirst):
    """Return a callable mapping a revision number to a matcher for the
    names that the given files (and their ancestors) had at that revision.

    'files' are repo-root-relative paths; with 'followfirst' the ancestor
    walk only follows first parents. The mapping is approximate: it
    relates linkrevs to file names, which can be wrong when a filelog
    entry is reused, but is good enough for display purposes.
    """
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating linkrevs to file names (which is not "correct" but
    # good enough).
    fcache = {}  # linkrev -> set of file names the followed files had there
    fcacheready = [False]  # one-element list: poor man's 'nonlocal' (Python 2)
    pctx = repo['.']

    def populate():
        # Seed the cache with each file's own filectx, then all of its
        # ancestors, keyed by the changelog revision they link to.
        for fn in files:
            for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
                for c in i:
                    fcache.setdefault(c.linkrev(), set()).add(c.path())

    def filematcher(rev):
        if not fcacheready[0]:
            # Lazy initialization
            fcacheready[0] = True
            populate()
        return scmutil.matchfiles(repo, fcache.get(rev, []))

    return filematcher
1909 1909
1910 1910 def _makenofollowlogfilematcher(repo, pats, opts):
1911 1911 '''hook for extensions to override the filematcher for non-follow cases'''
1912 1912 return None
1913 1913
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # Maps a log option to (revset template, operator joining list values).
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behaviour depends on revs...
    it = iter(revs)
    # use the next() builtin (as on the following line) instead of the
    # Python-2-only it.next() attribute call
    startrev = next(it)
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise util.Abort(_('cannot follow file not in parent '
                                       'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise util.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

    # We decided to fall back to the slowpath because at least one
    # of the paths was not a file. Check to see if at least one of them
    # existed in history - in that case, we'll continue down the
    # slowpath; otherwise, we can turn off the slowpath
    if slowpath:
        for path in match.files():
            if path == '.' or path in repo.store:
                break
        else:
            slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    # Translate each active option into its revset fragment and AND them
    # all together.
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2065 2065
def _logrevs(repo, opts):
    """Compute the default revisions a log command operates on.

    Default --rev value depends on --follow but --follow behaviour
    depends on revisions resolved from --rev...
    """
    following = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        # explicit revisions requested by the user
        return scmutil.revrange(repo, opts['rev'])
    if following and repo.dirstate.p1() == nullid:
        # --follow on an unborn working directory parent: nothing to show
        return revset.baseset()
    if following:
        return repo.revs('reverse(:.)')
    allrevs = revset.spanset(repo)
    allrevs.reverse()
    return allrevs
2080 2080
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        revs.sort(reverse=True)
    if expr:
        # Revset matchers often operate faster on revisions in changelog
        # order, because most filters deal with the changelog.
        revs.reverse()
        matcher = revset.match(repo.ui, expr)
        # Revset matches can reorder revisions. "A or B" typically returns
        # returns the revision matching A then the revision matching B. Sort
        # again to fix that.
        revs = matcher(repo, revs)
        revs.sort(reverse=True)
    if limit is not None:
        # Only materialize the first 'limit' revisions.
        picked = []
        it = iter(revs)
        while len(picked) < limit:
            rev = next(it, None)
            if rev is None:
                break
            picked.append(rev)
        revs = revset.baseset(picked)

    return revs, expr, filematcher
2117 2117
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        userordered = bool(opts.get('rev'))
        # Revset matchers often operate faster on revisions in changelog
        # order, because most filters deal with the changelog.
        if not userordered:
            revs.reverse()
        matcher = revset.match(repo.ui, expr)
        # Revset matches can reorder revisions. "A or B" typically returns
        # returns the revision matching A then the revision matching B. Sort
        # again to fix that, unless the user asked for a specific order.
        revs = matcher(repo, revs)
        if not userordered:
            revs.sort(reverse=True)
    if limit is not None:
        # Only materialize the first 'limit' revisions.
        picked = []
        it = iter(revs)
        while len(picked) < limit:
            r = next(it, None)
            if r is None:
                break
            picked.append(r)
        revs = revset.baseset(picked)

    return revs, expr, filematcher
2152 2152
def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
                 filematcher=None):
    """Render the revisions of 'dag' through 'displayer' next to an
    ASCII revision graph.

    'showparents' lists the nodes to draw with '@'; 'edgefn' computes
    the graph edges for each row; 'getrenamed', if given, reports
    copy/rename sources per file; 'filematcher' maps a rev to the
    matcher used when showing --patch/--stat output.
    """
    seen, state = [], graphmod.asciistate()
    for rev, type, ctx, parents in dag:
        # Pick the node glyph: working directory parents, obsolete
        # changesets and branch-closing changesets get distinct characters.
        char = 'o'
        if ctx.node() in showparents:
            char = '@'
        elif ctx.obsolete():
            char = 'x'
        elif ctx.closesbranch():
            char = '_'
        copies = None
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        # The displayer buffered the rendered changeset; split it into
        # lines so the graph code can interleave its edge columns.
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(rev)
        edges = edgefn(type, char, lines, seen, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()
2183 2183
def graphlog(ui, repo, *pats, **opts):
    """Show revision history next to an ASCII revision graph.

    Parameters are identical to log command ones.
    """
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    dag = graphmod.dagwalker(repo, revs)

    renamedfn = None
    if opts.get('copies'):
        stoprev = None
        if opts.get('rev'):
            stoprev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        renamedfn = templatekw.getrenamedfn(repo, endrev=stoprev)

    displayer = show_changeset(ui, repo, opts, buffered=True)
    parentnodes = [c.node() for c in repo[None].parents()]
    displaygraph(ui, dag, displayer, parentnodes,
                 graphmod.asciiedges, renamedfn, filematcher)
2199 2199
def checkunsupportedgraphflags(pats, opts):
    """Abort if any log option incompatible with -G/--graph was given."""
    for op in ["newest_first"]:
        # opts.get() replaces the redundant "op in opts and opts[op]"
        # double lookup; a missing key and a falsy value both mean
        # "option not active".
        if opts.get(op):
            raise util.Abort(_("-G/--graph option is incompatible with --%s")
                             % op.replace("_", "-"))
2205 2205
def graphrevs(repo, nodes, opts):
    """Reverse 'nodes' in place, honour any --limit option, and wrap the
    result for graph display."""
    cap = loglimit(opts)
    nodes.reverse()
    shown = nodes if cap is None else nodes[:cap]
    return graphmod.nodes(repo, shown)
2212 2212
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Add files matched by 'match' to the repository.

    'prefix' is the path prefix (for subrepo recursion and display);
    with 'explicitonly', only exactly-named files are added. Returns
    the list of paths that could not be added.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # collect rejected paths while still honouring the previous handler
    oldbad = match.bad
    match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
    for f in wctx.walk(match):
        exact = match.exact(f)
        # exact names are always added; otherwise only untracked files
        # that still exist on disk (unless explicitonly was requested)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    # recurse into subrepositories; without --subrepos only explicit
    # names are added there (explicitonly=True)
    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, match)
            if opts.get('subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
2249 2249
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking files matched by 'match' without removing them.

    Returns a pair (bad, forgot): paths that could not be forgotten and
    paths that actually were.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # collect rejected paths while still honouring the previous handler
    oldbad = match.bad
    match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
    wctx = repo[None]
    forgot = []
    s = repo.status(match=match, clean=True)
    # candidates: modified (0), added (1), deleted (3) and clean (6) files
    forget = sorted(s[0] + s[1] + s[3] + s[6])
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # warn about explicitly-named files that are not tracked
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2297 2297
def files(ui, ctx, m, fm, fmt, subrepos):
    """Write the names of files in 'ctx' matched by 'm' through
    formatter 'fm' using format string 'fmt'.

    With 'subrepos' (or when the match points into one) matching
    subrepositories are listed recursively. Returns 0 when at least one
    file was listed, 1 otherwise.
    """
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        # in the working directory, skip files marked for removal
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    def matchessubrepo(subpath):
        # hoisted out of the loop below: it only depends on 'm', so there
        # is no need to redefine it once per subrepository
        return (m.always() or m.exact(subpath)
                or any(f.startswith(subpath + '/') for f in m.files()))

    for subpath in sorted(ctx.substate):
        if subrepos or matchessubrepo(subpath):
            sub = ctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret
2330 2330
def remove(ui, repo, m, prefix, after, force, subrepos):
    """Implement removal of files matched by 'm'.

    'after' records deletions already made in the working directory;
    'force' also removes modified/added files; 'subrepos' recurses into
    subrepositories. Returns 0 on success, 1 if any file was warned
    about or could not be removed.
    """
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    def matchessubrepo(matcher, subpath):
        # hoisted out of the loop below; depends only on its arguments
        if matcher.exact(subpath):
            return True
        for f in matcher.files():
            if f.startswith(subpath):
                return True
        return False

    for subpath in sorted(wctx.substate):
        if subrepos or matchessubrepo(m, subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                if sub.removefiles(submatch, prefix, after, force, subrepos):
                    ret = 1
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % join(subpath))

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)

    def insubrepo(f):
        # hoisted out of the loop below; True when 'f' lives inside a
        # subrepository of the working context
        for subpath in wctx.substate:
            if f.startswith(subpath):
                return True
        return False

    for f in m.files():
        isdir = f in deleteddirs or wctx.hasdir(f)
        if f in repo.dirstate or isdir or f == '.' or insubrepo(f):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                ui.warn(_('not removing %s: no tracked files\n')
                        % m.rel(f))
            else:
                ui.warn(_('not removing %s: file is untracked\n')
                        % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1

    # 'toremove' replaces the original local named 'list', which shadowed
    # the builtin
    if force:
        toremove = modified + deleted + clean + added
    elif after:
        toremove = deleted
        for f in modified + added + clean:
            ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
            ret = 1
    else:
        toremove = deleted + clean
        for f in modified:
            ui.warn(_('not removing %s: file is modified (use -f'
                      ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            ui.warn(_('not removing %s: file has been marked for add'
                      ' (use forget to undo)\n') % m.rel(f))
            ret = 1

    for f in sorted(toremove):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    wlock = repo.wlock()
    try:
        if not after:
            for f in toremove:
                if f in added:
                    continue # we never unlink added files on remove
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        repo[None].forget(toremove)
    finally:
        wlock.release()

    return ret
2415 2415
def cat(ui, repo, ctx, matcher, prefix, **opts):
    """Write out the contents of files in 'ctx' matched by 'matcher'.

    Output goes through makefileobj (honouring any --output pattern);
    matching subrepositories are recursed into. Returns 0 when at least
    one file was written, 1 otherwise.
    """
    err = 1

    def write(path):
        fp = makefileobj(repo, opts.get('output'), ctx.node(),
                         pathname=os.path.join(prefix, path))
        data = ctx[path].data()
        if opts.get('decode'):
            data = repo.wwritedata(path, data)
        fp.write(data)
        fp.close()

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mf = repo.manifest
        mfnode = ctx.manifestnode()
        if mfnode and mf.find(mfnode, file)[0]:
            write(file)
            return 0

    # Don't warn about "missing" files that are really in subrepos
    bad = matcher.bad

    def badfn(path, msg):
        for subpath in ctx.substate:
            if path.startswith(subpath):
                return
        bad(path, msg)

    matcher.bad = badfn

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    # restore the original bad-file handler before recursing
    matcher.bad = bad

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, matcher)

            if not sub.cat(submatch, os.path.join(prefix, sub._path),
                           **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2468 2468
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    if opts.get('date'):
        # normalize the user-supplied date before handing it on
        opts['date'] = util.parsedate(opts.get('date'))
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove') and scmutil.addremove(repo, matcher, "", opts):
        raise util.Abort(
            _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)
2485 2485
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    """Amend changeset 'old' with the current working directory changes.

    A temporary intermediate commit records any dirty working directory
    state, then a new changeset combining it with 'old' is committed;
    bookmarks are moved over and the replaced changesets are either
    obsoleted or stripped. Returns the node of the amended changeset
    (or old.node() if nothing changed).
    """
    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()

    wlock = dsguard = lock = newid = None
    try:
        wlock = repo.wlock()
        dsguard = dirstateguard(repo, 'amend')
        lock = repo.lock()
        tr = repo.transaction('amend')
        try:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._activebookmark
            try:
                repo._activebookmark = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._activebookmark = activebookmark
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            #          |   from working dir to go into amending commit
            #          |   (or a workingctx if there were no changes)
            #          |
            # old      o - changeset to amend
            #          |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                # old.p2 without parentheses is a bound method and is always
                # truthy, so the original check never skipped this branch;
                # call it so merge copies are only folded in when a second
                # parent actually exists (a null changectx is falsy).
                if old.p2():
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())

                def samefile(f):
                    if f in ctx.manifest():
                        a = ctx.filectx(f)
                        if f in base.manifest():
                            b = base.filectx(f)
                            return (not a.cmp(b)
                                    and a.flags() == b.flags())
                        else:
                            return False
                    else:
                        return f not in base.manifest()
                files = [f for f in files if not samefile(f)]

                def filectxfn(repo, ctx_, path):
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

                user = opts.get('user') or old.user()
                date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This not what we expect from amend.
                return old.node()

            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        marks[bm] = newid
                    marks.write()
            #commit the whole amend process
            createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
            if createmarkers and newid != old.node():
                # mark the new changeset as successor of the rewritten one
                new = repo[newid]
                obs = [(old, (new,))]
                if node:
                    obs.append((ctx, ()))

                obsolete.createmarkers(repo, obs)
            tr.close()
        finally:
            tr.release()
        dsguard.close()
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        lockmod.release(lock, dsguard, wlock)
    return newid
2680 2680
def commiteditor(repo, ctx, subs, editform=''):
    """Return the changeset description, launching an editor if empty.

    When ``ctx`` already carries a non-empty description it is returned
    unchanged; otherwise the user is dropped into the commit editor via
    commitforceeditor().
    """
    description = ctx.description()
    if description:
        return description
    return commitforceeditor(repo, ctx, subs, editform=editform)
2685 2685
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform=''):
    """Always run the commit editor and return the edited message.

    The initial editor content comes from a [committemplate] template when
    one matches ``editform`` (most specific form first), otherwise from
    buildcommittext().  'HG:' comment lines are stripped from the result,
    ``finishdesc`` (if given) post-processes it, and an empty message
    raises util.Abort.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # look up templates from the most specific editform key
    # ('changeset.a.b') down to the generic 'changeset' key
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root; restore the previous working
    # directory even if the editor invocation raises (the original code
    # leaked the chdir on error)
    olddir = os.getcwd()
    os.chdir(repo.root)
    try:
        text = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                            editform=editform)
    finally:
        os.chdir(olddir)
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise util.Abort(_("empty commit message"))

    return text
2715 2715
def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    """Render the commit editor text from a [committemplate] template.

    ``tmpl`` selects the template via gettemplate(); the other keys of the
    [committemplate] config section are exposed to it as sub-templates.
    Returns the rendered text as a string.
    """
    ui = repo.ui
    tmpl, mapfile = gettemplate(ui, tmpl, None)

    try:
        t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
    except SyntaxError, inst:
        # template parse errors become a user-facing abort
        raise util.Abort(inst.args[0])

    # expose every other [committemplate] key as a sub-template
    for k, v in repo.ui.configitems('committemplate'):
        if k != 'changeset':
            t.t.cache[k] = v

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    # render into a buffer so the template output is captured, not printed
    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2735 2735
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the default plain-text skeleton shown in the commit editor.

    The result is the (possibly empty) existing description followed by
    'HG:' comment lines summarizing user, merge/branch/bookmark state,
    subrepos and the files touched by ``ctx``.
    """
    modified = ctx.modified()
    added = ctx.added()
    removed = ctx.removed()

    lines = []
    if ctx.description():
        lines.append(ctx.description())
    lines.append("")
    lines.append("") # Empty line between message and comments.
    lines.append(_("HG: Enter commit message."
                   " Lines beginning with 'HG:' are removed."))
    lines.append("HG: %s" % extramsg)
    lines.append("HG: --")
    lines.append(_("HG: user: %s") % ctx.user())
    if ctx.p2():
        lines.append(_("HG: branch merge"))
    if ctx.branch():
        lines.append(_("HG: branch '%s'") % ctx.branch())
    if bookmarks.isactivewdirparent(repo):
        lines.append(_("HG: bookmark '%s'") % repo._activebookmark)
    for s in subs:
        lines.append(_("HG: subrepo %s") % s)
    for f in added:
        lines.append(_("HG: added %s") % f)
    for f in modified:
        lines.append(_("HG: changed %s") % f)
    for f in removed:
        lines.append(_("HG: removed %s") % f)
    if not (added or modified or removed):
        lines.append(_("HG: no files changed"))
    lines.append("")

    return "\n".join(lines)
2763 2763
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Report the outcome of a freshly created commit on the ui.

    Prints 'created new head' when the commit adds a topological/branch
    head, notes reopened closed branch heads, and echoes the committed
    changeset id when the ui is verbose or debugging.

    ``bheads`` is the list of branch head nodes before the commit;
    ``opts`` are the command options ('amend', 'close_branch').
    """
    # use a None sentinel instead of the original mutable default ``opts={}``
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2809 2809
def revert(ui, repo, ctx, parents, *pats, **opts):
    """Restore files matched by ``pats`` to their state in ``ctx``.

    ``parents`` is the (parent, p2) pair of the working directory.  The
    function classifies every matched file by comparing working copy,
    dirstate and target revision, then dispatches each file to an action
    (revert/add/remove/drop/forget/undelete/noop/unknown), optionally
    backing up local modifications, and finally delegates the actual file
    operations to _performrevert().  Matching subrepos are reverted too.
    """
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        parent = p2
    if node == parent:
        pmf = mf
    else:
        # parent manifest is loaded lazily below, only for the merge case
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    wlock = repo.wlock()
    try:
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            m.bad = lambda x, y: False
            for abs in repo.walk(m):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                # suppress warnings for paths already accounted for
                # (known file, subrepo, or directory prefix of a known file)
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            m.bad = badfn
            for abs in ctx.walk(m):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find status of all files in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # split between files known in target manifest and the others
        smf = set(mf)

        # determine the exact nature of the deleted changesets
        deladded = _deleted - smf
        deleted = _deleted - deladded

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinct between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may need backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            if pmf is None:
                # only need parent manifest in the merge case,
                # so do not read by default
                pmf = repo[parent].manifest()
            mergeadd = dsmodified - set(pmf)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # distinguish between file to forget and the other
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present
        # at the same path. If such a file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of files>, <message format>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                  }

        # "constant" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set to avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backup = 2  # unconditionally do backup
        check = 1   # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backup = check = discard

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets whose results will change files on disk
            # Modified compared to target, no local change
            (modified, actions['revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions['revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions['revert'], backup),
            # Added since target
            (added, actions['remove'], discard),
            # Added in working directory
            (dsadded, actions['forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions['drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions['add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions['add'], check),
            # Removed since target, marked as such in working copy parent
            (dsremoved, actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions['undelete'], check),
            ## the following sets do not result in any file changes
            # File with no modification
            (clean, actions['noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions['unknown'], discard),
            )

        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touch on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup and (backup <= dobackup
                                     or wctx[abs].cmp(ctx[abs])):
                        bakname = "%s.orig" % rel
                        ui.note(_('saving current version of %s as %s\n') %
                                (rel, bakname))
                        if not opts.get('dry_run'):
                            if interactive:
                                util.copyfile(target, bakname)
                            else:
                                util.rename(target, bakname)
                if ui.verbose or not exact:
                    if not isinstance(msg, basestring):
                        msg = msg(abs)
                    ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
            _performrevert(repo, parents, ctx, actions, interactive)

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
                except KeyError:
                    raise util.Abort("subrepository '%s' does not exist in %s!"
                                     % (sub, short(ctx.node())))
    finally:
        wlock.release()
3091 3091
3092 3092 def _revertprefetch(repo, ctx, *files):
3093 3093 """Let extension changing the storage layer prefetch content"""
3094 3094 pass
3095 3095
def _performrevert(repo, parents, ctx, actions, interactive=False):
    """Actually perform all the actions computed for revert.

    This is an independent function to let extensions plug in and react to
    the imminent revert.

    ``actions`` maps action names to (file list, message) pairs as built
    by revert().  With ``interactive`` set, the 'revert' entries are
    turned into a patch the user can filter hunk-by-hunk.

    Make sure you have the working directory locked when calling this
    function.
    """
    parent, p2 = parents
    node = ctx.node()
    def checkout(f):
        # write the target revision's version of f into the working dir
        fc = ctx[f]
        return repo.wwrite(f, fc.data(), fc.flags())

    audit_path = pathutil.pathauditor(repo.root)
    for f in actions['forget'][0]:
        repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        try:
            util.unlinkpath(repo.wjoin(f))
        except OSError:
            # file already gone from disk; still mark it removed below
            pass
        repo.dirstate.remove(f)
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, {})
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        # experimental mode: show the diff from target to working copy
        # and apply the selected hunks reversed, instead of showing the
        # reversed diff directly
        reversehunks = repo.ui.configbool('experimental',
                                          'revertalternateinteractivemode',
                                          False)
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks = recordfilter(repo.ui, originalchunks)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except patch.PatchError, err:
            raise util.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        # Apply changes
        fp = cStringIO.StringIO()
        for c in chunks:
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except patch.PatchError, err:
                raise util.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            wsize = checkout(f)
            if normal:
                normal(f)
            elif wsize == repo.dirstate._map[f][2]:
                # changes may be overlooked without normallookup,
                # if size isn't changed at reverting
                repo.dirstate.normallookup(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    # restore copy records for files whose target-revision version is a copy
    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3190 3201
def command(table):
    """Build a decorator factory that registers commands into ``table``.

    ``table`` must be a dict.  The returned callable is used like::

        cmd = command(table)

        @cmd(name, options, synopsis, ...)
        def thing(...): ...

    ``name`` is the command name (aliases separated by '|').  ``options``
    is an iterable of argument tuples in the
    ``mercurial.fancyopts.fancyopts()`` format.  ``synopsis``, when given,
    is a short one-line usage summary stored with the table entry and
    shown in help output.

    ``norepo`` marks a command that requires no local repository (the
    default is False, since most commands operate on a repository).
    ``optionalrepo`` marks one that only optionally requires a local
    repository.  ``inferrepo`` asks for the repository to be inferred
    from the command-line arguments via ``findrepo()``.
    """
    def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
            inferrepo=False):
        def decorator(func):
            entry = (func, list(options))
            if synopsis:
                entry += (synopsis,)
            table[name] = entry

            if norepo or optionalrepo or inferrepo:
                # imported lazily to avoid an import cycle with commands
                import commands
                aliases = ' %s' % ' '.join(parsealiases(name))
                if norepo:
                    commands.norepo += aliases
                if optionalrepo:
                    commands.optionalrepo += aliases
                if inferrepo:
                    commands.inferrepo += aliases

            return func
        return decorator

    return cmd
3246 3257
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return a tuple of booleans below, if 'changes' is None:
#  (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
#  - (sourceurl, sourcebranch, sourcepeer, incoming)
#  - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()

# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# (state file, clearable, allowcommit, error, hint)
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg update' to abort")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]
3275 3286
def checkunfinished(repo, commit=False):
    '''Abort if a multistep operation (like graft) was left unfinished.

    With commit=True, states that explicitly allow committing are
    skipped.  It's probably good to check this right before
    bailifchanged().
    '''
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if not (commit and allowcommit) and repo.vfs.exists(statefile):
            raise util.Abort(msg, hint=hint)
3286 3297
def clearunfinished(repo):
    '''Abort on any unclearable unfinished state, then remove the state
    files of every clearable one.
    '''
    # first pass: refuse to proceed if a non-clearable state exists
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if not clearable and repo.vfs.exists(statefile):
            raise util.Abort(msg, hint=hint)
    # second pass: drop the state file of each clearable operation
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if clearable and repo.vfs.exists(statefile):
            util.unlink(repo.join(statefile))
3297 3308
class dirstateguard(object):
    '''Restore dirstate at unexpected failure.

    At the construction, this class does:

    - write current ``repo.dirstate`` out, and
    - save ``.hg/dirstate`` into the backup file

    This restores ``.hg/dirstate`` from backup file, if ``release()``
    is invoked before ``close()``.

    This just removes the backup file at ``close()`` before ``release()``.
    '''

    def __init__(self, repo, name):
        # flush pending dirstate changes so the on-disk file is current
        repo.dirstate.write()
        self._repo = repo
        # id(self) keeps concurrent guards from clobbering each other
        self._filename = 'dirstate.backup.%s.%d' % (name, id(self))
        repo.vfs.write(self._filename, repo.vfs.tryread('dirstate'))
        self._active = True
        self._closed = False

    def __del__(self):
        if self._active: # still active
            # this may occur, even if this class is used correctly:
            # for example, releasing other resources like transaction
            # may raise exception before ``dirstateguard.release`` in
            # ``release(tr, ....)``.
            self._abort()

    def close(self):
        # discard the backup: the guarded operation succeeded
        if not self._active: # already inactivated
            msg = (_("can't close already inactivated backup: %s")
                   % self._filename)
            raise util.Abort(msg)

        self._repo.vfs.unlink(self._filename)
        self._active = False
        self._closed = True

    def _abort(self):
        # this "invalidate()" prevents "wlock.release()" from writing
        # changes of dirstate out after restoring to original status
        self._repo.dirstate.invalidate()

        self._repo.vfs.rename(self._filename, 'dirstate')
        self._active = False

    def release(self):
        # restore the saved dirstate unless close() already ran
        if not self._closed:
            if not self._active: # already inactivated
                msg = (_("can't release already inactivated backup: %s")
                       % self._filename)
                raise util.Abort(msg)
            self._abort()
@@ -1,2480 +1,2551 b''
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 import collections
10 10 import cStringIO, email, os, errno, re, posixpath, copy
11 11 import tempfile, zlib, shutil
12 12 # On python2.4 you have to import these by name or they fail to
13 13 # load. This was not a problem on Python 2.7.
14 14 import email.Generator
15 15 import email.Parser
16 16
17 17 from i18n import _
18 18 from node import hex, short
19 19 import base85, mdiff, scmutil, util, diffhelpers, copies, encoding, error
20 20 import pathutil
21 21
22 22 gitre = re.compile('diff --git a/(.*) b/(.*)')
23 23 tabsplitter = re.compile(r'(\t+|[^\t]+)')
24 24
25 25 class PatchError(Exception):
26 26 pass
27 27
28 28
29 29 # public functions
30 30
def split(stream):
    '''return an iterator of individual patches from a stream'''
    def isheader(line, inheader):
        # RFC822-style "Key: value" detection; whitespace-led
        # continuation lines only count while already inside a header
        if inheader and line[0] in (' ', '\t'):
            # continuation
            return True
        if line[0] in (' ', '-', '+'):
            # diff line - don't check for header pattern in there
            return False
        l = line.split(': ', 1)
        return len(l) == 2 and ' ' not in l[0]

    def chunk(lines):
        # wrap accumulated lines in a file-like object
        return cStringIO.StringIO(''.join(lines))

    def hgsplit(stream, cur):
        # split on "# HG changeset patch" markers (hg export format)
        inheader = True

        for line in stream:
            if not line.strip():
                inheader = False
            if not inheader and line.startswith('# HG changeset patch'):
                yield chunk(cur)
                cur = []
                inheader = True

            cur.append(line)

        if cur:
            yield chunk(cur)

    def mboxsplit(stream, cur):
        # split on mbox "From " separators, recursing into each message
        # body (cur[1:] drops the "From " line itself)
        for line in stream:
            if line.startswith('From '):
                for c in split(chunk(cur[1:])):
                    yield c
                cur = []

            cur.append(line)

        if cur:
            for c in split(chunk(cur[1:])):
                yield c

    def mimesplit(stream, cur):
        # parse the whole input as a MIME message and yield each
        # acceptable text part separately
        def msgfp(m):
            # flatten a message object back into a file-like object
            fp = cStringIO.StringIO()
            g = email.Generator.Generator(fp, mangle_from_=False)
            g.flatten(m)
            fp.seek(0)
            return fp

        for line in stream:
            cur.append(line)
        c = chunk(cur)

        m = email.Parser.Parser().parse(c)
        if not m.is_multipart():
            yield msgfp(m)
        else:
            ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
            for part in m.walk():
                ct = part.get_content_type()
                if ct not in ok_types:
                    continue
                yield msgfp(part)

    def headersplit(stream, cur):
        # split whenever a new header block starts after non-header lines
        inheader = False

        for line in stream:
            if not inheader and isheader(line, inheader):
                yield chunk(cur)
                cur = []
                inheader = True
            if inheader and not isheader(line, inheader):
                inheader = False

            cur.append(line)

        if cur:
            yield chunk(cur)

    def remainder(cur):
        # fallback: treat the whole input as one plain patch
        yield chunk(cur)

    class fiter(object):
        # adapt objects that have readline() but no next() into iterators
        def __init__(self, fp):
            self.fp = fp

        def __iter__(self):
            return self

        def next(self):
            l = self.fp.readline()
            if not l:
                raise StopIteration
            return l

    inheader = False
    cur = []

    mimeheaders = ['content-type']

    if not util.safehasattr(stream, 'next'):
        # http responses, for example, have readline but not next
        stream = fiter(stream)

    # sniff the input line by line until one of the formats above is
    # recognized, then hand the stream (plus the buffered prefix) over
    # to the matching splitter
    for line in stream:
        cur.append(line)
        if line.startswith('# HG changeset patch'):
            return hgsplit(stream, cur)
        elif line.startswith('From '):
            return mboxsplit(stream, cur)
        elif isheader(line, inheader):
            inheader = True
            if line.split(':', 1)[0].lower() in mimeheaders:
                # let email parser handle this
                return mimesplit(stream, cur)
        elif line.startswith('--- ') and inheader:
            # No evil headers seen by diff start, split by hand
            return headersplit(stream, cur)
        # Not enough info, keep reading

    # if we are here, we have a very plain patch
    return remainder(cur)
157 157
def extract(ui, fileobj):
    '''extract patch from data read from fileobj.

    patch can be a normal patch or contained in an email message.

    return tuple (filename, message, user, date, branch, node, p1, p2).
    Any item in the returned tuple can be None. If filename is None,
    fileobj did not contain a patch. Caller must unlink filename when done.'''

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
                        r'retrieving revision [0-9]+(\.[0-9]+)*$|'
                        r'---[ \t].*?^\+\+\+[ \t]|'
                        r'\*\*\*[ \t].*?^---[ \t])', re.MULTILINE|re.DOTALL)

    # the extracted diff body is written to this temp file; the caller
    # is responsible for unlinking it
    fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
    tmpfp = os.fdopen(fd, 'w')
    try:
        msg = email.Parser.Parser().parse(fileobj)

        subject = msg['Subject']
        user = msg['From']
        if not subject and not user:
            # Not an email, restore parsed headers if any
            subject = '\n'.join(': '.join(h) for h in msg.items()) + '\n'

        # should try to parse msg['Date']
        date = None
        nodeid = None
        branch = None
        parents = []

        if subject:
            if subject.startswith('[PATCH'):
                # strip a leading "[PATCH n/m]" style tag
                pend = subject.find(']')
                if pend >= 0:
                    subject = subject[pend + 1:].lstrip()
            subject = re.sub(r'\n[ \t]+', ' ', subject)
            ui.debug('Subject: %s\n' % subject)
        if user:
            ui.debug('From: %s\n' % user)
        diffs_seen = 0
        ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
        message = ''
        for part in msg.walk():
            content_type = part.get_content_type()
            ui.debug('Content-Type: %s\n' % content_type)
            if content_type not in ok_types:
                continue
            payload = part.get_payload(decode=True)
            m = diffre.search(payload)
            if m:
                hgpatch = False
                hgpatchheader = False
                ignoretext = False

                ui.debug('found patch at byte %d\n' % m.start(0))
                diffs_seen += 1
                cfp = cStringIO.StringIO()
                # text before the diff is the candidate commit message;
                # an embedded "# HG changeset patch" header overrides
                # the email metadata gathered above
                for line in payload[:m.start(0)].splitlines():
                    if line.startswith('# HG changeset patch') and not hgpatch:
                        ui.debug('patch generated by hg export\n')
                        hgpatch = True
                        hgpatchheader = True
                        # drop earlier commit message content
                        cfp.seek(0)
                        cfp.truncate()
                        subject = None
                    elif hgpatchheader:
                        if line.startswith('# User '):
                            user = line[7:]
                            ui.debug('From: %s\n' % user)
                        elif line.startswith("# Date "):
                            date = line[7:]
                        elif line.startswith("# Branch "):
                            branch = line[9:]
                        elif line.startswith("# Node ID "):
                            nodeid = line[10:]
                        elif line.startswith("# Parent "):
                            parents.append(line[9:].lstrip())
                        elif not line.startswith("# "):
                            hgpatchheader = False
                    elif line == '---':
                        # patchbomb-style separator: what follows is not
                        # part of the commit message
                        ignoretext = True
                    if not hgpatchheader and not ignoretext:
                        cfp.write(line)
                        cfp.write('\n')
                message = cfp.getvalue()
                if tmpfp:
                    tmpfp.write(payload)
                    if not payload.endswith('\n'):
                        tmpfp.write('\n')
            elif not diffs_seen and message and content_type == 'text/plain':
                message += '\n' + payload
    except: # re-raises
        tmpfp.close()
        os.unlink(tmpname)
        raise

    if subject and not message.startswith(subject):
        message = '%s\n%s' % (subject, message)
    tmpfp.close()
    if not diffs_seen:
        # no patch body found: temp file is useless, remove it
        os.unlink(tmpname)
        return None, message, user, date, branch, None, None, None

    if parents:
        p1 = parents.pop(0)
    else:
        p1 = None

    if parents:
        p2 = parents.pop(0)
    else:
        p2 = None

    return tmpname, message, user, date, branch, nodeid, p1, p2
276 276
class patchmeta(object):
    """Patched file metadata

    'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
    or COPY. 'path' is patched file path. 'oldpath' is set to the
    origin file when 'op' is either COPY or RENAME, None otherwise. If
    file mode is changed, 'mode' is a tuple (islink, isexec) where
    'islink' is true if the file is a symlink and 'isexec' is true if
    the file is executable. Otherwise, 'mode' is None.
    """
    def __init__(self, path):
        self.path = path
        self.oldpath = None
        self.mode = None
        self.op = 'MODIFY'
        self.binary = False

    def setmode(self, mode):
        # decode st_mode-style bits: symlink flag and owner-exec flag
        self.mode = (mode & 0o20000, mode & 0o100)

    def copy(self):
        dup = patchmeta(self.path)
        dup.oldpath = self.oldpath
        dup.mode = self.mode
        dup.op = self.op
        dup.binary = self.binary
        return dup

    def _ispatchinga(self, afile):
        # a source of '/dev/null' means the patch creates the file
        if afile == '/dev/null':
            return self.op == 'ADD'
        return 'a/' + (self.oldpath or self.path) == afile

    def _ispatchingb(self, bfile):
        # a destination of '/dev/null' means the patch deletes the file
        if bfile == '/dev/null':
            return self.op == 'DELETE'
        return 'b/' + self.path == bfile

    def ispatching(self, afile, bfile):
        # True when this metadata describes the a/ and b/ pair of a hunk
        return self._ispatchinga(afile) and self._ispatchingb(bfile)

    def __repr__(self):
        return "<patchmeta %s %r>" % (self.op, self.path)
322 322
def readgitpatch(lr):
    """extract git-style metadata about patches from <patchname>"""

    # Filter patch for git information
    gp = None  # patchmeta currently being accumulated
    gitpatches = []
    for line in lr:
        line = line.rstrip(' \r\n')
        if line.startswith('diff --git a/'):
            m = gitre.match(line)
            if m:
                # a new per-file header starts: flush the previous one
                if gp:
                    gitpatches.append(gp)
                dst = m.group(2)
                gp = patchmeta(dst)
        elif gp:
            if line.startswith('--- '):
                # hunk data begins: metadata for this file is complete
                gitpatches.append(gp)
                gp = None
                continue
            if line.startswith('rename from '):
                gp.op = 'RENAME'
                gp.oldpath = line[12:]
            elif line.startswith('rename to '):
                gp.path = line[10:]
            elif line.startswith('copy from '):
                gp.op = 'COPY'
                gp.oldpath = line[10:]
            elif line.startswith('copy to '):
                gp.path = line[8:]
            elif line.startswith('deleted file'):
                gp.op = 'DELETE'
            elif line.startswith('new file mode '):
                gp.op = 'ADD'
                # trailing 6 octal digits are the file mode
                gp.setmode(int(line[-6:], 8))
            elif line.startswith('new mode '):
                gp.setmode(int(line[-6:], 8))
            elif line.startswith('GIT binary patch'):
                gp.binary = True
    if gp:
        gitpatches.append(gp)

    return gitpatches
366 366
class linereader(object):
    """File-like wrapper allowing lines to be pushed back onto the
    input stream (a simple lookahead buffer over readline())."""
    def __init__(self, fp):
        self.fp = fp
        self.buf = []

    def push(self, line):
        # queue a line to be returned before reading from fp again
        if line is not None:
            self.buf.append(line)

    def readline(self):
        # drain pushed-back lines first, then fall through to the file
        if self.buf:
            return self.buf.pop(0)
        return self.fp.readline()

    def __iter__(self):
        # yield lines until EOF (readline() returns an empty string)
        while True:
            line = self.readline()
            if not line:
                return
            yield line
390 390
class abstractbackend(object):
    """Abstract interface for reading and writing patch targets
    (filesystem tree, working directory, in-memory store)."""
    def __init__(self, ui):
        self.ui = ui

    def getfile(self, fname):
        """Return target file data and flags as a (data, (islink,
        isexec)) tuple. Data is None if file is missing/deleted.
        """
        raise NotImplementedError

    def setfile(self, fname, data, mode, copysource):
        """Write data to target file fname and set its mode. mode is a
        (islink, isexec) tuple. If data is None, the file content should
        be left unchanged. If the file is modified after being copied,
        copysource is set to the original file name.
        """
        raise NotImplementedError

    def unlink(self, fname):
        """Unlink target file."""
        raise NotImplementedError

    def writerej(self, fname, failed, total, lines):
        """Write rejected lines for fname. failed is the number of hunks
        which failed to apply and total the total number of hunks for
        this file.
        """
        # default: rejects are silently discarded
        pass

    def exists(self, fname):
        # report whether fname exists in the target
        raise NotImplementedError
422 422
class fsbackend(abstractbackend):
    """Patch backend applying changes directly to a filesystem tree
    rooted at basedir."""
    def __init__(self, ui, basedir):
        super(fsbackend, self).__init__(ui)
        self.opener = scmutil.opener(basedir)

    def _join(self, f):
        # absolute path of f under the backend root
        return os.path.join(self.opener.base, f)

    def getfile(self, fname):
        if self.opener.islink(fname):
            # a symlink's "data" is its target path
            return (self.opener.readlink(fname), (True, False))

        isexec = False
        try:
            # owner-exec bit determines the isexec flag
            isexec = self.opener.lstat(fname).st_mode & 0100 != 0
        except OSError, e:
            if e.errno != errno.ENOENT:
                raise
        try:
            return (self.opener.read(fname), (False, isexec))
        except IOError, e:
            if e.errno != errno.ENOENT:
                raise
            # missing file is signalled as (None, None), not an error
            return None, None

    def setfile(self, fname, data, mode, copysource):
        islink, isexec = mode
        if data is None:
            # content unchanged: only update the flags
            self.opener.setflags(fname, islink, isexec)
            return
        if islink:
            self.opener.symlink(data, fname)
        else:
            self.opener.write(fname, data)
            if isexec:
                self.opener.setflags(fname, False, True)

    def unlink(self, fname):
        # unlinking an already-absent file is not an error here
        self.opener.unlinkpath(fname, ignoremissing=True)

    def writerej(self, fname, failed, total, lines):
        # save hunks that could not be applied next to the target file
        fname = fname + ".rej"
        self.ui.warn(
            _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
            (failed, total, fname))
        fp = self.opener(fname, 'w')
        fp.writelines(lines)
        fp.close()

    def exists(self, fname):
        return self.opener.lexists(fname)
474 474
class workingbackend(fsbackend):
    """fsbackend that also keeps the repository dirstate in sync with
    the files it adds, removes and copies."""
    def __init__(self, ui, repo, similarity):
        super(workingbackend, self).__init__(ui, repo.root)
        self.repo = repo
        # similarity threshold used by marktouched() rename detection
        self.similarity = similarity
        self.removed = set()
        self.changed = set()
        self.copied = []

    def _checkknown(self, fname):
        # refuse to patch files that exist on disk but are not tracked
        if self.repo.dirstate[fname] == '?' and self.exists(fname):
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def setfile(self, fname, data, mode, copysource):
        self._checkknown(fname)
        super(workingbackend, self).setfile(fname, data, mode, copysource)
        if copysource is not None:
            self.copied.append((copysource, fname))
        self.changed.add(fname)

    def unlink(self, fname):
        self._checkknown(fname)
        super(workingbackend, self).unlink(fname)
        self.removed.add(fname)
        self.changed.add(fname)

    def close(self):
        """Flush accumulated dirstate updates; return sorted changed files."""
        wctx = self.repo[None]
        changed = set(self.changed)
        for src, dst in self.copied:
            scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
        if self.removed:
            wctx.forget(sorted(self.removed))
            for f in self.removed:
                if f not in self.repo.dirstate:
                    # File was deleted and no longer belongs to the
                    # dirstate, it was probably marked added then
                    # deleted, and should not be considered by
                    # marktouched().
                    changed.discard(f)
        if changed:
            scmutil.marktouched(self.repo, changed, self.similarity)
        return sorted(self.changed)
518 518
class filestore(object):
    """Store file contents in memory, spilling to a temporary directory
    once the in-memory budget (maxsize bytes, default 4MiB) is spent.
    A negative maxsize means "never spill"."""
    def __init__(self, maxsize=None):
        self.opener = None      # lazily-created spill-area opener
        self.files = {}         # fname -> (on-disk name, mode, copied)
        self.created = 0        # counter used for on-disk names
        self.maxsize = 4 * (2 ** 20) if maxsize is None else maxsize
        self.size = 0           # bytes currently held in memory
        self.data = {}          # fname -> (data, mode, copied)

    def setfile(self, fname, data, mode, copied=None):
        fits = self.maxsize < 0 or self.size + len(data) <= self.maxsize
        if fits:
            self.data[fname] = (data, mode, copied)
            self.size += len(data)
            return
        if self.opener is None:
            # first spill: create the on-disk area
            root = tempfile.mkdtemp(prefix='hg-patch-')
            self.opener = scmutil.opener(root)
        # use a plain counter as the on-disk name to avoid filename issues
        fn = str(self.created)
        self.created += 1
        self.opener.write(fn, data)
        self.files[fname] = (fn, mode, copied)

    def getfile(self, fname):
        try:
            return self.data[fname]
        except KeyError:
            pass
        if not self.opener or fname not in self.files:
            return None, None, None
        fn, mode, copied = self.files[fname]
        return self.opener.read(fn), mode, copied

    def close(self):
        # remove the spill area, if one was ever created
        if self.opener:
            shutil.rmtree(self.opener.base)
555 555
class repobackend(abstractbackend):
    """Patch backend reading from a repository changectx and writing
    results into a filestore (in-memory patching)."""
    def __init__(self, ui, repo, ctx, store):
        super(repobackend, self).__init__(ui)
        self.repo = repo
        # base context files are read from
        self.ctx = ctx
        # filestore receiving the patched content
        self.store = store
        self.changed = set()
        self.removed = set()
        self.copied = {}

    def _checkknown(self, fname):
        # only files present in the base context can be patched
        if fname not in self.ctx:
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def getfile(self, fname):
        try:
            fctx = self.ctx[fname]
        except error.LookupError:
            # missing file is signalled as (None, None)
            return None, None
        flags = fctx.flags()
        return fctx.data(), ('l' in flags, 'x' in flags)

    def setfile(self, fname, data, mode, copysource):
        if copysource:
            self._checkknown(copysource)
        if data is None:
            # flag-only change: keep the existing content
            data = self.ctx[fname].data()
        self.store.setfile(fname, data, mode, copysource)
        self.changed.add(fname)
        if copysource:
            self.copied[fname] = copysource

    def unlink(self, fname):
        self._checkknown(fname)
        self.removed.add(fname)

    def exists(self, fname):
        return fname in self.ctx

    def close(self):
        # report every file touched by the patch
        return self.changed | self.removed
597 597
# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
unidesc = re.compile('@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
# context-diff range markers: "--- start,len ----" / "*** start,len ***"
contextdesc = re.compile('(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
# recognized end-of-line normalization modes for patching
eolmodes = ['strict', 'crlf', 'lf', 'auto']
602 602
class patchfile(object):
    """Applies hunks of a parsed patch to one target file.

    Current content is read through 'backend' (or 'store' for
    copy/rename sources); hunks are applied with offset and fuzz
    matching; the result is written back via writelines(), and hunks
    that could not be applied are collected in self.rej.
    """
    def __init__(self, ui, gp, backend, store, eolmode='strict'):
        self.fname = gp.path
        self.eolmode = eolmode
        # line ending detected from the target's first line, or None
        self.eol = None
        self.backend = backend
        self.ui = ui
        self.lines = []
        self.exists = False
        self.missing = True
        self.mode = gp.mode
        self.copysource = gp.oldpath
        self.create = gp.op in ('ADD', 'COPY', 'RENAME')
        self.remove = gp.op == 'DELETE'
        if self.copysource is None:
            data, mode = backend.getfile(self.fname)
        else:
            data, mode = store.getfile(self.copysource)[:2]
        if data is not None:
            self.exists = self.copysource is None or backend.exists(self.fname)
            self.missing = False
            if data:
                self.lines = mdiff.splitnewlines(data)
            if self.mode is None:
                self.mode = mode
            if self.lines:
                # Normalize line endings
                if self.lines[0].endswith('\r\n'):
                    self.eol = '\r\n'
                elif self.lines[0].endswith('\n'):
                    self.eol = '\n'
                if eolmode != 'strict':
                    nlines = []
                    for l in self.lines:
                        if l.endswith('\r\n'):
                            l = l[:-2] + '\n'
                        nlines.append(l)
                    self.lines = nlines
        else:
            if self.create:
                self.missing = False
            if self.mode is None:
                self.mode = (False, False)
        if self.missing:
            self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)

        # line content -> line numbers, used for fuzzy matching
        self.hash = {}
        self.dirty = 0
        # cumulative line-count delta from hunks applied so far
        self.offset = 0
        # drift between expected and actual positions of the last hunk
        self.skew = 0
        # hunks that failed to apply
        self.rej = []
        self.fileprinted = False
        self.printfile(False)
        self.hunks = 0

    def writelines(self, fname, lines, mode):
        # translate line endings back according to eolmode before writing
        if self.eolmode == 'auto':
            eol = self.eol
        elif self.eolmode == 'crlf':
            eol = '\r\n'
        else:
            eol = '\n'

        if self.eolmode != 'strict' and eol and eol != '\n':
            rawlines = []
            for l in lines:
                if l and l[-1] == '\n':
                    l = l[:-1] + eol
                rawlines.append(l)
            lines = rawlines

        self.backend.setfile(fname, ''.join(lines), mode, self.copysource)

    def printfile(self, warn):
        # print "patching file X" once: loudly if warn, else only verbosely
        if self.fileprinted:
            return
        if warn or self.ui.verbose:
            self.fileprinted = True
        s = _("patching file %s\n") % self.fname
        if warn:
            self.ui.warn(s)
        else:
            self.ui.note(s)


    def findlines(self, l, linenum):
        # looks through the hash and finds candidate lines. The
        # result is a list of line numbers sorted based on distance
        # from linenum

        cand = self.hash.get(l, [])
        if len(cand) > 1:
            # resort our list of potentials forward then back.
            cand.sort(key=lambda x: abs(x - linenum))
        return cand

    def write_rej(self):
        # our rejects are a little different from patch(1). This always
        # creates rejects in the same form as the original patch. A file
        # header is inserted so that you can run the reject through patch again
        # without having to type the filename.
        if not self.rej:
            return
        base = os.path.basename(self.fname)
        lines = ["--- %s\n+++ %s\n" % (base, base)]
        for x in self.rej:
            for l in x.hunk:
                lines.append(l)
                if l[-1] != '\n':
                    lines.append("\n\ No newline at end of file\n")
        self.backend.writerej(self.fname, len(self.rej), self.hunks, lines)

    def apply(self, h):
        """Apply one hunk; return fuzz amount (0 when clean) or -1 on
        rejection."""
        if not h.complete():
            raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
                             (h.number, h.desc, len(h.a), h.lena, len(h.b),
                             h.lenb))

        self.hunks += 1

        if self.missing:
            self.rej.append(h)
            return -1

        if self.exists and self.create:
            if self.copysource:
                self.ui.warn(_("cannot create %s: destination already "
                               "exists\n") % self.fname)
            else:
                self.ui.warn(_("file %s already exists\n") % self.fname)
            self.rej.append(h)
            return -1

        if isinstance(h, binhunk):
            # binary hunks replace the whole content; no fuzzing
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                l = h.new(self.lines)
                self.lines[:] = l
                self.offset += len(l)
                self.dirty = True
            return 0

        horig = h
        if (self.eolmode in ('crlf', 'lf')
            or self.eolmode == 'auto' and self.eol):
            # If new eols are going to be normalized, then normalize
            # hunk data before patching. Otherwise, preserve input
            # line-endings.
            h = h.getnormalized()

        # fast case first, no offsets, no fuzz
        old, oldstart, new, newstart = h.fuzzit(0, False)
        oldstart += self.offset
        orig_start = oldstart
        # if there's skew we want to emit the "(offset %d lines)" even
        # when the hunk cleanly applies at start + skew, so skip the
        # fast case code
        if (self.skew == 0 and
            diffhelpers.testhunk(old, self.lines, oldstart) == 0):
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                self.lines[oldstart:oldstart + len(old)] = new
                self.offset += len(new) - len(old)
                self.dirty = True
            return 0

        # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
        self.hash = {}
        for x, s in enumerate(self.lines):
            self.hash.setdefault(s, []).append(x)

        for fuzzlen in xrange(3):
            for toponly in [True, False]:
                old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
                oldstart = oldstart + self.offset + self.skew
                oldstart = min(oldstart, len(self.lines))
                if old:
                    cand = self.findlines(old[0][1:], oldstart)
                else:
                    # Only adding lines with no or fuzzed context, just
                    # take the skew in account
                    cand = [oldstart]

                for l in cand:
                    if not old or diffhelpers.testhunk(old, self.lines, l) == 0:
                        self.lines[l : l + len(old)] = new
                        self.offset += len(new) - len(old)
                        self.skew = l - orig_start
                        self.dirty = True
                        offset = l - orig_start - fuzzlen
                        if fuzzlen:
                            msg = _("Hunk #%d succeeded at %d "
                                    "with fuzz %d "
                                    "(offset %d lines).\n")
                            self.printfile(True)
                            self.ui.warn(msg %
                                (h.number, l + 1, fuzzlen, offset))
                        else:
                            msg = _("Hunk #%d succeeded at %d "
                                    "(offset %d lines).\n")
                            self.ui.note(msg % (h.number, l + 1, offset))
                        return fuzzlen
        self.printfile(True)
        self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
        self.rej.append(horig)
        return -1

    def close(self):
        # flush the patched content and any rejects; return reject count
        if self.dirty:
            self.writelines(self.fname, self.lines, self.mode)
        self.write_rej()
        return len(self.rej)
817 817
class header(object):
    """The per-file header of a patch, plus the hunks that follow it."""
    diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
    diff_re = re.compile('diff -r .* (.*)$')
    allhunks_re = re.compile('(?:index|deleted file) ')
    pretty_re = re.compile('(?:new file|deleted file) ')
    special_re = re.compile('(?:index|deleted|copy|rename) ')
    newfile_re = re.compile('(?:new file)')

    def __init__(self, header):
        self.header = header
        self.hunks = []

    def binary(self):
        # a git 'index' line marks binary (all-or-nothing) content
        return any(l.startswith('index ') for l in self.header)

    def pretty(self, fp):
        # human-oriented rendering: stop at the first line that lets us
        # summarize the change
        for l in self.header:
            if l.startswith('index '):
                fp.write(_('this modifies a binary file (all or nothing)\n'))
                break
            if self.pretty_re.match(l):
                fp.write(l)
                if self.binary():
                    fp.write(_('this is a binary file\n'))
                break
            if l.startswith('---'):
                fp.write(_('%d hunks, %d lines changed\n') %
                         (len(self.hunks),
                          sum([max(h.added, h.removed) for h in self.hunks])))
                break
            fp.write(l)

    def write(self, fp):
        fp.write(''.join(self.header))

    def allhunks(self):
        # True when the file must be taken or left as a whole
        for l in self.header:
            if self.allhunks_re.match(l):
                return True
        return False

    def files(self):
        firstline = self.header[0]
        m = self.diffgit_re.match(firstline)
        if not m:
            return self.diff_re.match(firstline).groups()
        fromfile, tofile = m.groups()
        if fromfile == tofile:
            return [fromfile]
        return [fromfile, tofile]

    def filename(self):
        return self.files()[-1]

    def __repr__(self):
        return '<header %s>' % (' '.join(map(repr, self.files())))

    def isnewfile(self):
        for l in self.header:
            if self.newfile_re.match(l):
                return True
        return False

    def special(self):
        # Special files are shown only at the header level and not at the hunk
        # level for example a file that has been deleted is a special file.
        # The user cannot change the content of the operation, in the case of
        # the deleted file he has to take the deletion or not take it, he
        # cannot take some of it.
        # Newly added files are special if they are empty, they are not special
        # if they have some content as we want to be able to change it
        nocontent = len(self.header) == 2
        if self.isnewfile() and nocontent:
            # empty new file
            return True
        return any(self.special_re.match(l) for l in self.header)
889 889
class recordhunk(object):
    """patch hunk

    XXX shouldn't we merge this with the other hunk class?
    """
    maxcontext = 3

    def __init__(self, header, fromline, toline, proc, before, hunk, after):
        def trimcontext(number, lines):
            # NOTE: context trimming is intentionally disabled by the
            # 'False and' guard; the code is kept for reference
            delta = len(lines) - self.maxcontext
            if False and delta > 0:
                return number + delta, lines[:self.maxcontext]
            return number, lines

        self.header = header
        self.fromline, self.before = trimcontext(fromline, before)
        self.toline, self.after = trimcontext(toline, after)
        self.proc = proc
        self.hunk = hunk
        self.added, self.removed = self.countchanges(self.hunk)

    def __eq__(self, v):
        if not isinstance(v, recordhunk):
            return False

        # note: 'after'/'before' context is deliberately not compared
        return ((v.hunk == self.hunk) and
                (v.proc == self.proc) and
                (self.fromline == v.fromline) and
                (self.header.files() == v.header.files()))

    def __hash__(self):
        # must stay consistent with __eq__ above
        return hash((tuple(self.hunk),
            tuple(self.header.files()),
            self.fromline,
            self.proc))

    def countchanges(self, hunk):
        """hunk -> (n+,n-)"""
        add = len([h for h in hunk if h[0] == '+'])
        rem = len([h for h in hunk if h[0] == '-'])
        return add, rem

    def write(self, fp):
        # emit a unified-diff hunk; a trailing "\ No newline" marker is
        # not a content line so it is excluded from the range lengths
        delta = len(self.before) + len(self.after)
        if self.after and self.after[-1] == '\\ No newline at end of file\n':
            delta -= 1
        fromlen = delta + self.removed
        tolen = delta + self.added
        fp.write('@@ -%d,%d +%d,%d @@%s\n' %
                 (self.fromline, fromlen, self.toline, tolen,
                  self.proc and (' ' + self.proc)))
        fp.write(''.join(self.before + self.hunk + self.after))

    pretty = write

    def filename(self):
        return self.header.filename()

    def __repr__(self):
        return '<hunk %r@%d>' % (self.filename(), self.fromline)
950 950
def filterpatch(ui, headers, operation=None):
    """Interactively filter patch chunks into applied-only chunks"""
    if operation is None:
        operation = _('record')

    def prompt(skipfile, skipall, query, chunk):
        """prompt query, and process base inputs

        - y/n for the rest of file
        - y/n for the rest
        - ? (help)
        - q (quit)

        Return True/False and possibly updated skipfile and skipall.
        """
        newpatches = None
        # an earlier 'all'/'done' answer decides every later prompt
        if skipall is not None:
            return skipall, skipfile, skipall, newpatches
        # an earlier per-file answer decides the rest of this file
        if skipfile is not None:
            return skipfile, skipfile, skipall, newpatches
        while True:
            # '$$'-separated choice string consumed by ui.promptchoice:
            # summary first, then one help text per choice
            resps = _('[Ynesfdaq?]'
                      '$$ &Yes, record this change'
                      '$$ &No, skip this change'
                      '$$ &Edit this change manually'
                      '$$ &Skip remaining changes to this file'
                      '$$ Record remaining changes to this &file'
                      '$$ &Done, skip remaining changes and files'
                      '$$ Record &all changes to all remaining files'
                      '$$ &Quit, recording no changes'
                      '$$ &? (display help)')
            r = ui.promptchoice("%s %s" % (query, resps))
            ui.write("\n")
            if r == 8: # ?
                for c, t in ui.extractchoices(resps)[1]:
                    ui.write('%s - %s\n' % (c, t.lower()))
                continue
            elif r == 0: # yes
                ret = True
            elif r == 1: # no
                ret = False
            elif r == 2: # Edit patch
                if chunk is None:
                    ui.write(_('cannot edit patch for whole file'))
                    ui.write("\n")
                    continue
                if chunk.header.binary():
                    ui.write(_('cannot edit patch for binary file'))
                    ui.write("\n")
                    continue
                # Patch comment based on the Git one (based on comment at end of
                # http://mercurial.selenic.com/wiki/RecordExtension)
                phelp = '---' + _("""
To remove '-' lines, make them ' ' lines (context).
To remove '+' lines, delete them.
Lines starting with # will be removed from the patch.

If the patch applies cleanly, the edited hunk will immediately be
added to the record list. If it does not apply cleanly, a rejects
file will be generated: you can use that when you try again. If
all lines of the hunk are removed, then the edit is aborted and
the hunk is left unchanged.
""")
                (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-",
                                                      suffix=".diff",
                                                      text=True)
                ncpatchfp = None
                try:
                    # Write the initial patch
                    f = os.fdopen(patchfd, "w")
                    chunk.header.write(f)
                    chunk.write(f)
                    f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
                    f.close()
                    # Start the editor and wait for it to complete
                    editor = ui.geteditor()
                    ui.system("%s \"%s\"" % (editor, patchfn),
                              environ={'HGUSER': ui.username()},
                              onerr=util.Abort, errprefix=_("edit failed"))
                    # Remove comment lines
                    patchfp = open(patchfn)
                    ncpatchfp = cStringIO.StringIO()
                    for line in patchfp:
                        if not line.startswith('#'):
                            ncpatchfp.write(line)
                    patchfp.close()
                    ncpatchfp.seek(0)
                    newpatches = parsepatch(ncpatchfp)
                finally:
                    os.unlink(patchfn)
                    del ncpatchfp
                # Signal that the chunk shouldn't be applied as-is, but
                # provide the new patch to be used instead.
                ret = False
            elif r == 3: # Skip
                ret = skipfile = False
            elif r == 4: # file (Record remaining)
                ret = skipfile = True
            elif r == 5: # done, skip remaining
                ret = skipall = False
            elif r == 6: # all
                ret = skipall = True
            elif r == 7: # quit
                raise util.Abort(_('user quit'))
            return ret, skipfile, skipall, newpatches

    seen = set()
    applied = {} # 'filename' -> [] of chunks
    skipfile, skipall = None, None
    pos, total = 1, sum(len(h.hunks) for h in headers)
    for h in headers:
        pos += len(h.hunks)
        skipfile = None
        # fixoffset tracks how skipped hunks shift the target line numbers
        # of later hunks in the same file
        fixoffset = 0
        hdr = ''.join(h.header)
        if hdr in seen:
            continue
        seen.add(hdr)
        if skipall is None:
            h.pretty(ui)
        msg = (_('examine changes to %s?') %
               _(' and ').join("'%s'" % f for f in h.files()))
        r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
        if not r:
            continue
        applied[h.filename()] = [h]
        if h.allhunks():
            # binary or similar: all-or-nothing, no per-hunk prompting
            applied[h.filename()] += h.hunks
            continue
        for i, chunk in enumerate(h.hunks):
            if skipfile is None and skipall is None:
                chunk.pretty(ui)
            if total == 1:
                msg = _("record this change to '%s'?") % chunk.filename()
            else:
                idx = pos - len(h.hunks) + i
                msg = _("record change %d/%d to '%s'?") % (idx, total,
                                                           chunk.filename())
            r, skipfile, skipall, newpatches = prompt(skipfile,
                                                      skipall, msg, chunk)
            if r:
                if fixoffset:
                    # copy so the shared chunk object is left untouched
                    chunk = copy.copy(chunk)
                    chunk.toline += fixoffset
                applied[chunk.filename()].append(chunk)
            elif newpatches is not None:
                # the user edited the hunk: record the edited version(s)
                for newpatch in newpatches:
                    for newhunk in newpatch.hunks:
                        if fixoffset:
                            newhunk.toline += fixoffset
                        applied[newhunk.filename()].append(newhunk)
            else:
                fixoffset += chunk.removed - chunk.added
    # only keep files whose header was special or that kept at least
    # one hunk beyond the header itself
    return sum([h for h in applied.itervalues()
                if h[0].special() or len(h) > 1], [])
class hunk(object):
    """A hunk parsed from a unified or context diff.

    'a' accumulates the old-side lines ('-'/' ' prefixed), 'b' the
    new-side lines (unprefixed), and 'hunk' the raw hunk text starting
    with the range description line.
    """
    def __init__(self, desc, num, lr, context):
        # desc: the range line ('@@ ...' or '*** ...'); num: 1-based hunk
        # ordinal used in error messages; lr: linereader to consume the
        # body from (None builds an empty shell, see getnormalized);
        # context: True for context-diff format, otherwise unified
        self.number = num
        self.desc = desc
        self.hunk = [desc]
        self.a = []
        self.b = []
        self.starta = self.lena = None
        self.startb = self.lenb = None
        if lr is not None:
            if context:
                self.read_context_hunk(lr)
            else:
                self.read_unified_hunk(lr)

    def getnormalized(self):
        """Return a copy with line endings normalized to LF."""

        def normalize(lines):
            nlines = []
            for line in lines:
                if line.endswith('\r\n'):
                    line = line[:-2] + '\n'
                nlines.append(line)
            return nlines

        # Dummy object, it is rebuilt manually
        nh = hunk(self.desc, self.number, None, None)
        nh.number = self.number
        nh.desc = self.desc
        nh.hunk = self.hunk
        nh.a = normalize(self.a)
        nh.b = normalize(self.b)
        nh.starta = self.starta
        nh.startb = self.startb
        nh.lena = self.lena
        nh.lenb = self.lenb
        return nh

    def read_unified_hunk(self, lr):
        """Parse '@@ -starta,lena +startb,lenb @@' and read the body."""
        m = unidesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, self.lena, self.startb, self.lenb = m.groups()
        # a missing length in the range means a single line
        if self.lena is None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb is None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a,
                             self.b)
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length. Lets try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1
        self._fixnewline(lr)

    def read_context_hunk(self, lr):
        """Read a context-format hunk, converting it to unified form."""
        # in context diffs the '*** start,end ****' range follows the
        # '***************' separator passed in as desc
        self.desc = lr.readline()
        m = contextdesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, aend = m.groups()
        self.starta = int(self.starta)
        if aend is None:
            aend = self.starta
        self.lena = int(aend) - self.starta
        if self.starta:
            self.lena += 1
        # first pass: the old-side block
        for x in xrange(self.lena):
            l = lr.readline()
            if l.startswith('---'):
                # lines addition, old block is empty
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('- ') or l.startswith('! '):
                u = '-' + s
            elif l.startswith('  '):
                u = ' ' + s
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.a.append(u)
            self.hunk.append(u)

        l = lr.readline()
        if l.startswith('\ '):
            # no-eol marker: drop the newline we kept on the last line
            s = self.a[-1][:-1]
            self.a[-1] = s
            self.hunk[-1] = s
            l = lr.readline()
        m = contextdesc.match(l)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.startb, bend = m.groups()
        self.startb = int(self.startb)
        if bend is None:
            bend = self.startb
        self.lenb = int(bend) - self.startb
        if self.startb:
            self.lenb += 1
        hunki = 1
        # second pass: the new-side block, merged into self.hunk
        for x in xrange(self.lenb):
            l = lr.readline()
            if l.startswith('\ '):
                # XXX: the only way to hit this is with an invalid line range.
                # The no-eol marker is not counted in the line range, but I
                # guess there are diff(1) out there which behave differently.
                s = self.b[-1][:-1]
                self.b[-1] = s
                self.hunk[hunki - 1] = s
                continue
            if not l:
                # line deletions, new block is empty and we hit EOF
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('+ ') or l.startswith('! '):
                u = '+' + s
            elif l.startswith('  '):
                u = ' ' + s
            elif len(self.b) == 0:
                # line deletions, new block is empty
                lr.push(l)
                break
            else:
                # (message historically says 'old' for both sides)
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.b.append(s)
            # align this line against entries already placed in
            # self.hunk by the old-side pass, inserting it if absent
            while True:
                if hunki >= len(self.hunk):
                    h = ""
                else:
                    h = self.hunk[hunki]
                hunki += 1
                if h == u:
                    break
                elif h.startswith('-'):
                    continue
                else:
                    self.hunk.insert(hunki - 1, u)
                    break

        if not self.a:
            # this happens when lines were only added to the hunk
            for x in self.hunk:
                if x.startswith('-') or x.startswith(' '):
                    self.a.append(x)
        if not self.b:
            # this happens when lines were only deleted from the hunk
            for x in self.hunk:
                if x.startswith('+') or x.startswith(' '):
                    self.b.append(x[1:])
        # @@ -start,len +start,len @@
        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
                                               self.startb, self.lenb)
        self.hunk[0] = self.desc
        self._fixnewline(lr)

    def _fixnewline(self, lr):
        # consume a trailing '\ No newline at end of file' marker, if any
        l = lr.readline()
        if l.startswith('\ '):
            diffhelpers.fix_newline(self.hunk, self.a, self.b)
        else:
            lr.push(l)

    def complete(self):
        # True when the parsed bodies match the advertised line counts
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def _fuzzit(self, old, new, fuzz, toponly):
        # this removes context lines from the top and bottom of list 'l'. It
        # checks the hunk to make sure only context lines are removed, and then
        # returns a new shortened list of lines.
        fuzz = min(fuzz, len(old))
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            for x in xrange(hlen - 1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x + 1][0] == ' ':
                    top += 1
                else:
                    break
            if not toponly:
                for x in xrange(hlen - 1):
                    if self.hunk[hlen - bot - 1][0] == ' ':
                        bot += 1
                    else:
                        break

            bot = min(fuzz, bot)
            top = min(fuzz, top)
            return old[top:len(old) - bot], new[top:len(new) - bot], top
        return old, new, 0

    def fuzzit(self, fuzz, toponly):
        """Return (old, oldstart, new, newstart) after removing up to
        'fuzz' context lines, with starts adjusted accordingly."""
        old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
        oldstart = self.starta + top
        newstart = self.startb + top
        # zero length hunk ranges already have their start decremented
        if self.lena and oldstart > 0:
            oldstart -= 1
        if self.lenb and newstart > 0:
            newstart -= 1
        return old, oldstart, new, newstart
1321 1321
class binhunk(object):
    'A binary patch file.'
    def __init__(self, lr, fname):
        # text: decoded content (None until _read succeeds)
        # delta: True when this is a git binary delta against a base,
        # False for a full 'literal' replacement
        self.text = None
        self.delta = False
        self.hunk = ['GIT binary patch\n']
        self._fname = fname
        self._read(lr)

    def complete(self):
        return self.text is not None

    def new(self, lines):
        """Return the patched content as a single-element list; 'lines'
        is the base content, used only for delta hunks."""
        if self.delta:
            return [applybindelta(self.text, ''.join(lines))]
        return [self.text]

    def _read(self, lr):
        def getline(lr, hunk):
            # read one raw line, keep it in the hunk, return it stripped
            l = lr.readline()
            hunk.append(l)
            return l.rstrip('\r\n')

        size = 0
        # scan forward to the 'literal <size>' or 'delta <size>' header
        while True:
            line = getline(lr, self.hunk)
            if not line:
                raise PatchError(_('could not extract "%s" binary data')
                                 % self._fname)
            if line.startswith('literal '):
                size = int(line[8:].rstrip())
                break
            if line.startswith('delta '):
                size = int(line[6:].rstrip())
                self.delta = True
                break
        # each data line starts with a length character: 'A'-'Z' encode
        # 1-26 decoded bytes, 'a'-'z' encode 27-52; the rest is base85
        dec = []
        line = getline(lr, self.hunk)
        while len(line) > 1:
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            try:
                dec.append(base85.b85decode(line[1:])[:l])
            except ValueError, e:
                raise PatchError(_('could not decode "%s" binary patch: %s')
                                 % (self._fname, str(e)))
            line = getline(lr, self.hunk)
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            raise PatchError(_('"%s" length is %d bytes, should be %d')
                             % (self._fname, len(text), size))
        self.text = text
1377 1377
def parsefilename(str):
    """Extract the file name from a '--- ' or '+++ ' patch line.

    The name ends at the first tab if there is one, otherwise at the
    first space; with neither, the whole (EOL-stripped) rest is used.
    """
    # --- filename \t|space stuff
    s = str[4:].rstrip('\r\n')
    for sep in ('\t', ' '):
        cut = s.find(sep)
        if cut >= 0:
            return s[:cut]
    return s
1387 1387
def reversehunks(hunks):
    '''reverse the signs in the hunks given as argument

    This function operates on hunks coming out of patch.filterpatch, that is
    a list of the form: [header1, hunk1, hunk2, header2...]. Example usage:

    >>> rawpatch = """diff --git a/folder1/g b/folder1/g
    ... --- a/folder1/g
    ... +++ b/folder1/g
    ... @@ -1,7 +1,7 @@
    ... +firstline
    ...  c
    ...  1
    ...  2
    ... + 3
    ... -4
    ...  5
    ...  d
    ... +lastline"""
    >>> hunks = parsepatch(rawpatch)
    >>> hunkscomingfromfilterpatch = []
    >>> for h in hunks:
    ...     hunkscomingfromfilterpatch.append(h)
    ...     hunkscomingfromfilterpatch.extend(h.hunks)

    >>> reversedhunks = reversehunks(hunkscomingfromfilterpatch)
    >>> fp = cStringIO.StringIO()
    >>> for c in reversedhunks:
    ...      c.write(fp)
    >>> fp.seek(0)
    >>> reversedpatch = fp.read()
    >>> print reversedpatch
    diff --git a/folder1/g b/folder1/g
    --- a/folder1/g
    +++ b/folder1/g
    @@ -1,4 +1,3 @@
    -firstline
     c
     1
     2
    @@ -1,6 +2,6 @@
     c
     1
     2
    - 3
    +4
     5
     d
    @@ -5,3 +6,2 @@
     5
     d
    -lastline

    '''

    import crecord as crecordmod
    flip = {'-': '+', '+': '-'}
    newhunks = []
    for c in hunks:
        if isinstance(c, crecordmod.uihunk):
            # curses hunks encapsulate the record hunk in _hunk
            c = c._hunk
        if isinstance(c, recordhunk):
            # invert each changed line in place and swap the line counts;
            # headers and other chunk types pass through untouched
            for j, line in enumerate(c.hunk):
                sign = line[:1]
                if sign in flip:
                    c.hunk[j] = flip[sign] + line[1:]
            c.removed, c.added = c.added, c.removed
        newhunks.append(c)
    return newhunks
1458
def parsepatch(originalchunks):
    """patch -> [] of headers -> [] of hunks """
    class parser(object):
        """patch parsing state machine"""
        def __init__(self):
            self.fromline = 0
            self.toline = 0
            self.proc = ''
            self.header = None
            self.context = []
            self.before = []
            self.hunk = []
            self.headers = []

        def addrange(self, limits):
            # remember where the next hunk starts on both sides
            fromstart, fromend, tostart, toend, proc = limits
            self.fromline = int(fromstart)
            self.toline = int(tostart)
            self.proc = proc

        def addcontext(self, context):
            # context after changed lines closes the pending hunk
            if self.hunk:
                h = recordhunk(self.header, self.fromline, self.toline,
                               self.proc, self.before, self.hunk, context)
                self.header.hunks.append(h)
                self.fromline += len(self.before) + h.removed
                self.toline += len(self.before) + h.added
                self.before = []
                self.hunk = []
                self.proc = ''
            self.context = context

        def addhunk(self, hunk):
            # context seen just before becomes this hunk's leading context
            if self.context:
                self.before = self.context
                self.context = []
            self.hunk = hunk

        def newfile(self, hdr):
            self.addcontext([])
            h = header(hdr)
            self.headers.append(h)
            self.header = h

        def addother(self, line):
            pass # 'other' lines are ignored

        def finished(self):
            self.addcontext([])
            return self.headers

        # legal transitions between scanpatch() event types; the values
        # are plain functions, hence the explicit parser argument at the
        # call site below
        transitions = {
            'file': {'context': addcontext,
                     'file': newfile,
                     'hunk': addhunk,
                     'range': addrange},
            'context': {'file': newfile,
                        'hunk': addhunk,
                        'range': addrange,
                        'other': addother},
            'hunk': {'context': addcontext,
                     'file': newfile,
                     'range': addrange},
            'range': {'context': addcontext,
                      'hunk': addhunk},
            'other': {'other': addother},
            }

    p = parser()
    fp = cStringIO.StringIO()
    fp.write(''.join(originalchunks))
    fp.seek(0)

    state = 'context'
    for newstate, data in scanpatch(fp):
        try:
            p.transitions[state][newstate](p, data)
        except KeyError:
            raise PatchError('unhandled transition: %s -> %s' %
                             (state, newstate))
        state = newstate
    del fp
    return p.finished()
1471 1542
def pathtransform(path, strip, prefix):
    '''turn a path from a patch into a path suitable for the repository

    prefix, if not empty, is expected to be normalized with a / at the end.

    Returns (stripped components, path in repository).

    >>> pathtransform('a/b/c', 0, '')
    ('', 'a/b/c')
    >>> pathtransform(' a/b/c ', 0, '')
    ('', ' a/b/c')
    >>> pathtransform(' a/b/c ', 2, '')
    ('a/b/', 'c')
    >>> pathtransform('a/b/c', 0, 'd/e/')
    ('', 'd/e/a/b/c')
    >>> pathtransform(' a//b/c ', 2, 'd/e/')
    ('a//b/', 'd/e/c')
    >>> pathtransform('a/b/c', 3, '')
    Traceback (most recent call last):
    PatchError: unable to strip away 1 of 3 dirs from a/b/c
    '''
    if strip == 0:
        return '', prefix + path.rstrip()
    pathlen = len(path)
    i = 0
    count = strip
    while count:
        i = path.find('/', i)
        if i == -1:
            raise PatchError(_("unable to strip away %d of %d dirs from %s") %
                             (count, strip, path))
        i += 1
        # a run of consecutive slashes counts as one separator
        while i < pathlen - 1 and path[i] == '/':
            i += 1
        count -= 1
    return path[:i].lstrip(), prefix + path[i:].rstrip()
1509 1580
def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix):
    """Build a patchmeta for a plain (non-git) patch hunk.

    Decides which file the hunk targets and whether it creates or
    deletes it, from the '---'/'+++' names, the hunk ranges, and which
    of the named files actually exist in 'backend'.
    """
    nulla = afile_orig == "/dev/null"
    nullb = bfile_orig == "/dev/null"
    # a zero-length zero-start old (resp. new) range marks file
    # creation (resp. removal)
    create = nulla and hunk.starta == 0 and hunk.lena == 0
    remove = nullb and hunk.startb == 0 and hunk.lenb == 0
    abase, afile = pathtransform(afile_orig, strip, prefix)
    gooda = not nulla and backend.exists(afile)
    bbase, bfile = pathtransform(bfile_orig, strip, prefix)
    if afile == bfile:
        goodb = gooda
    else:
        goodb = not nullb and backend.exists(bfile)
    missing = not goodb and not gooda and not create

    # some diff programs apparently produce patches where the afile is
    # not /dev/null, but afile starts with bfile
    abasedir = afile[:afile.rfind('/') + 1]
    bbasedir = bfile[:bfile.rfind('/') + 1]
    if (missing and abasedir == bbasedir and afile.startswith(bfile)
        and hunk.starta == 0 and hunk.lena == 0):
        create = True
        missing = False

    # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
    # diff is between a file and its backup. In this case, the original
    # file should be patched (see original mpatch code).
    isbackup = (abase == bbase and bfile.startswith(afile))
    fname = None
    if not missing:
        # prefer a name that exists in the backend
        if gooda and goodb:
            if isbackup:
                fname = afile
            else:
                fname = bfile
        elif gooda:
            fname = afile

    if not fname:
        # fall back on the advertised names
        if not nullb:
            if isbackup:
                fname = afile
            else:
                fname = bfile
        elif not nulla:
            fname = afile
        else:
            raise PatchError(_("undefined source and destination files"))

    gp = patchmeta(fname)
    if create:
        gp.op = 'ADD'
    elif remove:
        gp.op = 'DELETE'
    return gp
1564 1635
def scanpatch(fp):
    """like patch.iterhunks, but yield different events

    - ('file', [header_lines + fromfile + tofile])
    - ('context', [context_lines])
    - ('hunk', [hunk_lines])
    - ('range', (-start,len, +start,len, proc))
    """
    rangere = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
    reader = linereader(fp)

    def collect(start, cond):
        """accumulate lines from reader while cond holds"""
        group = [start]
        while True:
            nextline = reader.readline()
            if not nextline:
                break
            if not cond(nextline):
                reader.push(nextline)
                break
            group.append(nextline)
        return group

    while True:
        line = reader.readline()
        if not line:
            break
        if line.startswith('diff --git a/') or line.startswith('diff -r '):
            def notheader(l):
                parts = l.split(None, 1)
                return not parts or parts[0] not in ('---', 'diff')
            header = collect(line, notheader)
            # pull in the '---'/'+++' pair when it directly follows
            fromfile = reader.readline()
            if fromfile.startswith('---'):
                tofile = reader.readline()
                header += [fromfile, tofile]
            else:
                reader.push(fromfile)
            yield 'file', header
        elif line[0] == ' ':
            yield 'context', collect(line, lambda l: l[0] in ' \\')
        elif line[0] in '-+':
            yield 'hunk', collect(line, lambda l: l[0] in '-+\\')
        else:
            m = rangere.match(line)
            if m:
                yield 'range', m.groups()
            else:
                yield 'other', line
1616 1687
def scangitpatch(lr, firstline):
    """
    Git patches can emit:
    - rename a to b
    - change b
    - copy a to c
    - change c

    We cannot apply this sequence as-is, the renamed 'a' could not be
    found for it would have been renamed already. And we cannot copy
    from 'b' instead because 'b' would have been changed already. So
    we scan the git patch for copy and rename commands so we can
    perform the copies ahead of time.
    """
    pos = 0
    try:
        # seekable input: remember where to rewind to after the scan
        pos = lr.fp.tell()
        fp = lr.fp
    except IOError:
        # unseekable (e.g. a pipe): buffer everything in memory first
        fp = cStringIO.StringIO(lr.fp.read())
    gitreader = linereader(fp)
    gitreader.push(firstline)
    patches = readgitpatch(gitreader)
    fp.seek(pos)
    return patches
1642 1713
def iterhunks(fp):
    """Read a patch and yield the following events:
    - ("file", afile, bfile, firsthunk): select a new target file.
    - ("hunk", hunk): a new hunk is ready to be applied, follows a
    "file" event.
    - ("git", gitchanges): current diff is in git format, gitchanges
    maps filenames to gitpatch records. Unique event.
    """
    afile = ""
    bfile = ""
    state = None
    hunknum = 0
    emitfile = newfile = False
    gitpatches = None

    # our states
    BFILE = 1
    # context: None until the diff flavor is known, then True for
    # context diffs and False for unified diffs
    context = None
    lr = linereader(fp)

    while True:
        x = lr.readline()
        if not x:
            break
        if state == BFILE and (
            (not context and x[0] == '@')
            or (context is not False and x.startswith('***************'))
            or x.startswith('GIT binary patch')):
            # a hunk body for the currently selected file
            gp = None
            if (gitpatches and
                gitpatches[-1].ispatching(afile, bfile)):
                gp = gitpatches.pop()
            if x.startswith('GIT binary patch'):
                h = binhunk(lr, gp.path)
            else:
                if context is None and x.startswith('***************'):
                    context = True
                h = hunk(x, hunknum + 1, lr, context)
            hunknum += 1
            if emitfile:
                emitfile = False
                yield 'file', (afile, bfile, h, gp and gp.copy() or None)
            yield 'hunk', h
        elif x.startswith('diff --git a/'):
            m = gitre.match(x.rstrip(' \r\n'))
            if not m:
                continue
            if gitpatches is None:
                # scan whole input for git metadata
                gitpatches = scangitpatch(lr, x)
                yield 'git', [g.copy() for g in gitpatches
                              if g.op in ('COPY', 'RENAME')]
                gitpatches.reverse()
            afile = 'a/' + m.group(1)
            bfile = 'b/' + m.group(2)
            # flush metadata-only git patches that come before this one
            while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
                gp = gitpatches.pop()
                yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
            if not gitpatches:
                raise PatchError(_('failed to synchronize metadata for "%s"')
                                 % afile[2:])
            gp = gitpatches[-1]
            newfile = True
        elif x.startswith('---'):
            # check for a unified diff
            l2 = lr.readline()
            if not l2.startswith('+++'):
                lr.push(l2)
                continue
            newfile = True
            context = False
            afile = parsefilename(x)
            bfile = parsefilename(l2)
        elif x.startswith('***'):
            # check for a context diff
            l2 = lr.readline()
            if not l2.startswith('---'):
                lr.push(l2)
                continue
            l3 = lr.readline()
            lr.push(l3)
            if not l3.startswith("***************"):
                lr.push(l2)
                continue
            newfile = True
            context = True
            afile = parsefilename(x)
            bfile = parsefilename(l2)

        if newfile:
            newfile = False
            emitfile = True
            state = BFILE
            hunknum = 0

    # remaining git patches carry no hunks of their own (metadata only)
    while gitpatches:
        gp = gitpatches.pop()
        yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
1741 1812
def applybindelta(binchunk, data):
    """Apply a binary delta hunk
    The algorithm used is the algorithm from git's patch-delta.c

    'binchunk' is the decoded delta (two size headers followed by copy
    and insert opcodes), 'data' the base content copy commands read
    from.  Returns the reconstructed content.  Raises PatchError on a
    zero opcode.
    """
    def deltahead(binchunk):
        # length of the variable-width size header: bytes with the high
        # bit set continue the value
        i = 0
        for c in binchunk:
            i += 1
            if not (ord(c) & 0x80):
                return i
        return i
    # accumulate fragments in a list and join once at the end instead
    # of building the result with quadratic 'out +=' concatenation
    out = []
    # skip the source and destination size headers
    s = deltahead(binchunk)
    binchunk = binchunk[s:]
    s = deltahead(binchunk)
    binchunk = binchunk[s:]
    i = 0
    while i < len(binchunk):
        cmd = ord(binchunk[i])
        i += 1
        if (cmd & 0x80):
            # copy command: bits 0-3 select offset bytes, bits 4-6 size
            # bytes, both little-endian
            offset = 0
            size = 0
            if (cmd & 0x01):
                offset = ord(binchunk[i])
                i += 1
            if (cmd & 0x02):
                offset |= ord(binchunk[i]) << 8
                i += 1
            if (cmd & 0x04):
                offset |= ord(binchunk[i]) << 16
                i += 1
            if (cmd & 0x08):
                offset |= ord(binchunk[i]) << 24
                i += 1
            if (cmd & 0x10):
                size = ord(binchunk[i])
                i += 1
            if (cmd & 0x20):
                size |= ord(binchunk[i]) << 8
                i += 1
            if (cmd & 0x40):
                size |= ord(binchunk[i]) << 16
                i += 1
            if size == 0:
                # a copy size of zero encodes 0x10000 bytes
                size = 0x10000
            out.append(data[offset:offset + size])
        elif cmd != 0:
            # insert command: the next 'cmd' bytes are literal data
            out.append(binchunk[i:i + cmd])
            i += cmd
        else:
            raise PatchError(_('unexpected delta opcode 0'))
    return ''.join(out)
1797 1868
def applydiff(ui, fp, backend, store, strip=1, prefix='', eolmode='strict'):
    """Reads a patch from fp and tries to apply it.

    Returns 0 for a clean patch, -1 if any rejects were found and 1 if
    there was any fuzz.

    If 'eolmode' is 'strict', the patch content and patched file are
    read in binary mode. Otherwise, line endings are ignored when
    patching then normalized according to 'eolmode'.
    """
    # delegate to the generic engine, fixing 'patchfile' as the
    # per-file patcher implementation
    return _applydiff(ui, fp, patchfile, backend, store,
                      strip=strip, prefix=prefix, eolmode=eolmode)
1810 1881
def _applydiff(ui, fp, patcher, backend, store, strip=1, prefix='',
               eolmode='strict'):
    """Apply the patch read from fp, creating one 'patcher' per file.

    Returns -1 if any hunks were rejected, otherwise 1 if any hunk
    applied only with fuzz, else 0.
    """
    if prefix:
        prefix = pathutil.canonpath(backend.repo.root, backend.repo.getcwd(),
                                    prefix)
        if prefix != '':
            prefix += '/'
    def pstrip(p):
        # strip path components and prepend the destination prefix
        return pathtransform(p, strip - 1, prefix)[1]

    rejects = 0
    err = 0
    current_file = None

    for state, values in iterhunks(fp):
        if state == 'hunk':
            if not current_file:
                continue
            ret = current_file.apply(values)
            if ret > 0:
                err = 1
        elif state == 'file':
            # switching targets: close the previous file first
            if current_file:
                rejects += current_file.close()
            current_file = None
            afile, bfile, first_hunk, gp = values
            if gp:
                gp.path = pstrip(gp.path)
                if gp.oldpath:
                    gp.oldpath = pstrip(gp.oldpath)
            else:
                gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
                                   prefix)
            if gp.op == 'RENAME':
                backend.unlink(gp.oldpath)
            if not first_hunk:
                # metadata-only change: create/delete/copy/rename/mode
                if gp.op == 'DELETE':
                    backend.unlink(gp.path)
                    continue
                data, mode = None, None
                if gp.op in ('RENAME', 'COPY'):
                    data, mode = store.getfile(gp.oldpath)[:2]
                    # FIXME: failing getfile has never been handled here
                    assert data is not None
                if gp.mode:
                    mode = gp.mode
                    if gp.op == 'ADD':
                        # Added files without content have no hunk and
                        # must be created
                        data = ''
                if data or mode:
                    if (gp.op in ('ADD', 'RENAME', 'COPY')
                        and backend.exists(gp.path)):
                        raise PatchError(_("cannot create %s: destination "
                                           "already exists") % gp.path)
                    backend.setfile(gp.path, data, mode, gp.oldpath)
                continue
            try:
                current_file = patcher(ui, gp, backend, store,
                                       eolmode=eolmode)
            except PatchError, inst:
                ui.warn(str(inst) + '\n')
                current_file = None
                rejects += 1
                continue
        elif state == 'git':
            # stash copy/rename sources so later hunks can read them
            for gp in values:
                path = pstrip(gp.oldpath)
                data, mode = backend.getfile(path)
                if data is None:
                    # The error ignored here will trigger a getfile()
                    # error in a place more appropriate for error
                    # handling, and will not interrupt the patching
                    # process.
                    pass
                else:
                    store.setfile(path, data, mode)
        else:
            raise util.Abort(_('unsupported parser state: %s') % state)

    if current_file:
        rejects += current_file.close()

    if rejects:
        return -1
    return err
1898 1969
def _externalpatch(ui, repo, patcher, patchname, strip, files,
                   similarity):
    """use <patcher> to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor.

    The external program's stdout is scanned line by line to discover
    which files were patched (added to the ``files`` set), whether any
    hunk applied with fuzz, and which hunks failed or produced reject
    files.  Raises PatchError if the patch command exits non-zero.
    """

    fuzz = False
    args = []
    cwd = repo.root
    if cwd:
        # run the patcher from the repo root so relative paths resolve
        args.append('-d %s' % util.shellquote(cwd))
    fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
                                         util.shellquote(patchname)))
    try:
        for line in fp:
            line = line.rstrip()
            ui.note(line + '\n')
            if line.startswith('patching file '):
                pf = util.parsepatchoutput(line)
                printed_file = False
                files.add(pf)
            elif line.find('with fuzz') >= 0:
                # NOTE(review): pf/printed_file are only bound after a
                # "patching file" line; a fuzz/FAILED line arriving first
                # would raise NameError — assumes patch(1) output order.
                fuzz = True
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
            elif line.find('saving rejects to file') >= 0:
                ui.warn(line + '\n')
            elif line.find('FAILED') >= 0:
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
    finally:
        # even on error, record the touched files (enables rename/copy
        # detection via similarity) before checking the exit status
        if files:
            scmutil.marktouched(repo, files, similarity)
        code = fp.close()
        if code:
            raise PatchError(_("patch command failed: %s") %
                             util.explainexit(code)[0])
    return fuzz
1940 2011
def patchbackend(ui, backend, patchobj, strip, prefix, files=None,
                 eolmode='strict'):
    """Apply ``patchobj`` through ``backend`` using the builtin patcher.

    patchobj may be a file path or an open file-like object.  ``files``
    (a set, created if None) is updated with the files the backend
    touched.  ``eolmode`` of None means "read [patch] eol from config".
    Returns True if the patch applied with changes, raises PatchError
    if it failed to apply.
    """
    if files is None:
        files = set()
    if eolmode is None:
        eolmode = ui.config('patch', 'eol', 'strict')
    if eolmode.lower() not in eolmodes:
        raise util.Abort(_('unsupported line endings type: %s') % eolmode)
    eolmode = eolmode.lower()

    store = filestore()
    try:
        # accept either a filename or an already-open file object
        fp = open(patchobj, 'rb')
    except TypeError:
        fp = patchobj
    try:
        ret = applydiff(ui, fp, backend, store, strip=strip, prefix=prefix,
                        eolmode=eolmode)
    finally:
        # only close the file if we opened it ourselves
        if fp != patchobj:
            fp.close()
        files.update(backend.close())
        store.close()
    if ret < 0:
        raise PatchError(_('patch failed to apply'))
    return ret > 0
1967 2038
def internalpatch(ui, repo, patchobj, strip, prefix='', files=None,
                  eolmode='strict', similarity=0):
    """use builtin patch to apply <patchobj> to the working directory.
    returns whether patch was applied with fuzz factor."""
    # route everything through a working-directory backend so dirstate
    # updates and similarity-based rename detection are handled for us
    wdbackend = workingbackend(ui, repo, similarity)
    applied = patchbackend(ui, wdbackend, patchobj, strip, prefix, files,
                           eolmode)
    return applied
1974 2045
def patchrepo(ui, repo, ctx, store, patchobj, strip, prefix, files=None,
              eolmode='strict'):
    """Apply a patch against repository history via a repo backend."""
    repobck = repobackend(ui, repo, ctx, store)
    return patchbackend(ui, repobck, patchobj, strip, prefix, files, eolmode)
1979 2050
def patch(ui, repo, patchname, strip=1, prefix='', files=None, eolmode='strict',
          similarity=0):
    """Apply <patchname> to the working directory.

    'eolmode' specifies how end of lines should be handled. It can be:
    - 'strict': inputs are read in binary mode, EOLs are preserved
    - 'crlf': EOLs are ignored when patching and reset to CRLF
    - 'lf': EOLs are ignored when patching and reset to LF
    - None: get it from user settings, default to 'strict'
    'eolmode' is ignored when using an external patcher program.

    Returns whether patch was applied with fuzz factor.
    """
    if files is None:
        files = set()
    # an explicitly configured [ui] patch tool takes precedence over the
    # builtin patcher
    externaltool = ui.config('ui', 'patch')
    if externaltool:
        return _externalpatch(ui, repo, externaltool, patchname, strip,
                              files, similarity)
    return internalpatch(ui, repo, patchname, strip, prefix, files, eolmode,
                         similarity)
2001 2072
def changedfiles(ui, repo, patchpath, strip=1):
    """Return the set of file names touched by the patch at ``patchpath``.

    Only parses the patch; nothing is applied.  Rename sources are
    included in the result alongside their destinations.
    """
    backend = fsbackend(ui, repo.root)
    fp = open(patchpath, 'rb')
    try:
        changed = set()
        for state, values in iterhunks(fp):
            if state == 'file':
                afile, bfile, first_hunk, gp = values
                if gp:
                    # git metadata present: strip path components
                    gp.path = pathtransform(gp.path, strip - 1, '')[1]
                    if gp.oldpath:
                        gp.oldpath = pathtransform(gp.oldpath, strip - 1, '')[1]
                else:
                    # plain patch: synthesize metadata from the hunk header
                    gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
                                       '')
                changed.add(gp.path)
                if gp.op == 'RENAME':
                    changed.add(gp.oldpath)
            elif state not in ('hunk', 'git'):
                raise util.Abort(_('unsupported parser state: %s') % state)
        return changed
    finally:
        fp.close()
2025 2096
class GitDiffRequired(Exception):
    """Raised when a change cannot be expressed without git-style diffs."""
    pass
2028 2099
def diffallopts(ui, opts=None, untrusted=False, section='diff'):
    '''return diffopts with all features supported and parsed'''
    # delegates to difffeatureopts with every feature class enabled
    return difffeatureopts(ui, opts=opts, untrusted=untrusted, section=section,
                           git=True, whitespace=True, formatchanging=True)
2033 2104
# backwards-compatibility alias for callers that predate diffallopts
diffopts = diffallopts
2035 2106
def difffeatureopts(ui, opts=None, untrusted=False, section='diff', git=False,
                    whitespace=False, formatchanging=False):
    '''return diffopts with only opted-in features parsed

    Features:
    - git: git-style diffs
    - whitespace: whitespace options like ignoreblanklines and ignorews
    - formatchanging: options that will likely break or cause correctness issues
      with most diff parsers
    '''
    # look a value up in the command-line opts first, then fall back to the
    # config file; forceplain overrides the config lookup under HGPLAIN
    def get(key, name=None, getter=ui.configbool, forceplain=None):
        if opts:
            v = opts.get(key)
            if v:
                return v
        if forceplain is not None and ui.plain():
            return forceplain
        return getter(section, name or key, None, untrusted=untrusted)

    # core options, expected to be understood by every diff parser
    buildopts = {
        'nodates': get('nodates'),
        'showfunc': get('show_function', 'showfunc'),
        'context': get('unified', getter=ui.config),
    }

    if git:
        buildopts['git'] = get('git')
    if whitespace:
        buildopts['ignorews'] = get('ignore_all_space', 'ignorews')
        buildopts['ignorewsamount'] = get('ignore_space_change',
                                          'ignorewsamount')
        buildopts['ignoreblanklines'] = get('ignore_blank_lines',
                                            'ignoreblanklines')
    if formatchanging:
        buildopts['text'] = opts and opts.get('text')
        buildopts['nobinary'] = get('nobinary')
        # noprefix is forced off under HGPLAIN so scripted consumers
        # always see the a/ b/ path prefixes
        buildopts['noprefix'] = get('noprefix', forceplain=False)

    return mdiff.diffopts(**buildopts)
2076 2147
def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None,
         losedatafn=None, prefix='', relroot=''):
    '''yields diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.

    losedatafn(**kwarg) is a callable run when opts.upgrade=True and
    every time some change cannot be represented with the current
    patch format. Return False to upgrade to git patch format, True to
    accept the loss or raise an exception to abort the diff. It is
    called with the name of current file being diffed as 'fn'. If set
    to None, patches will always be upgraded to git format when
    necessary.

    prefix is a filename prefix that is prepended to all filenames on
    display (used for subrepos).

    relroot, if not empty, must be normalized with a trailing /. Any match
    patterns that fall outside it will be ignored.'''

    if opts is None:
        opts = mdiff.defaultopts

    if not node1 and not node2:
        node1 = repo.dirstate.p1()

    # small (20-entry) LRU cache of filelogs to avoid re-opening the same
    # revlog for every revision of a file
    def lrugetfilectx():
        cache = {}
        order = collections.deque()
        def getfilectx(f, ctx):
            fctx = ctx.filectx(f, filelog=cache.get(f))
            if f not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[f] = fctx.filelog()
            else:
                order.remove(f)
            order.append(f)
            return fctx
        return getfilectx
    getfilectx = lrugetfilectx()

    ctx1 = repo[node1]
    ctx2 = repo[node2]

    relfiltered = False
    if relroot != '' and match.always():
        # as a special case, create a new matcher with just the relroot
        pats = [relroot]
        match = scmutil.match(ctx2, pats, default='path')
        relfiltered = True

    if not changes:
        changes = repo.status(ctx1, ctx2, match=match)
    modified, added, removed = changes[:3]

    if not modified and not added and not removed:
        return []

    if repo.ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]

    copy = {}
    if opts.git or opts.upgrade:
        copy = copies.pathcopies(ctx1, ctx2, match=match)

    if relroot is not None:
        if not relfiltered:
            # XXX this would ideally be done in the matcher, but that is
            # generally meant to 'or' patterns, not 'and' them. In this case we
            # need to 'and' all the patterns from the matcher with relroot.
            def filterrel(l):
                return [f for f in l if f.startswith(relroot)]
            modified = filterrel(modified)
            added = filterrel(added)
            removed = filterrel(removed)
            relfiltered = True
        # filter out copies where either side isn't inside the relative root
        copy = dict(((dst, src) for (dst, src) in copy.iteritems()
                     if dst.startswith(relroot)
                     and src.startswith(relroot)))

    def difffn(opts, losedata):
        return trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
                       copy, getfilectx, opts, losedata, prefix, relroot)
    if opts.upgrade and not opts.git:
        # try a plain diff first; upgrade to git format only if data
        # would be lost and losedatafn agrees (or is unset)
        try:
            def losedata(fn):
                if not losedatafn or not losedatafn(fn=fn):
                    raise GitDiffRequired
            # Buffer the whole output until we are sure it can be generated
            return list(difffn(opts.copy(git=False), losedata))
        except GitDiffRequired:
            return difffn(opts.copy(git=True), None)
    else:
        return difffn(opts, None)
2178 2249
def difflabel(func, *args, **kw):
    '''yields 2-tuples of (output, label) based on the output of func()'''
    # labels for header lines (file/metadata section of a diff)
    headprefixes = [('diff', 'diff.diffline'),
                    ('copy', 'diff.extended'),
                    ('rename', 'diff.extended'),
                    ('old', 'diff.extended'),
                    ('new', 'diff.extended'),
                    ('deleted', 'diff.extended'),
                    ('---', 'diff.file_a'),
                    ('+++', 'diff.file_b')]
    # labels for hunk content lines
    textprefixes = [('@', 'diff.hunk'),
                    ('-', 'diff.deleted'),
                    ('+', 'diff.inserted')]
    head = False
    for chunk in func(*args, **kw):
        lines = chunk.split('\n')
        for i, line in enumerate(lines):
            if i != 0:
                yield ('\n', '')
            # track whether we are inside a header block: a header ends at
            # the first '@@' hunk line and starts at any line that is not
            # hunk content
            if head:
                if line.startswith('@'):
                    head = False
            else:
                if line and line[0] not in ' +-@\\':
                    head = True
            stripline = line
            diffline = False
            if not head and line and line[0] in '+-':
                # highlight tabs and trailing whitespace, but only in
                # changed lines
                stripline = line.rstrip()
                diffline = True

            prefixes = textprefixes
            if head:
                prefixes = headprefixes
            for prefix, label in prefixes:
                if stripline.startswith(prefix):
                    if diffline:
                        # emit tab runs with their own label
                        for token in tabsplitter.findall(stripline):
                            if '\t' == token[0]:
                                yield (token, 'diff.tab')
                            else:
                                yield (token, label)
                    else:
                        yield (stripline, label)
                    break
            else:
                yield (line, '')
            if line != stripline:
                # the part rstrip() removed is trailing whitespace
                yield (line[len(stripline):], 'diff.trailingwhitespace')
2230 2301
def diffui(*args, **kw):
    '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
    # difflabel drives diff() itself and annotates each output chunk
    labeled = difflabel(diff, *args, **kw)
    return labeled
2234 2305
2235 2306 def _filepairs(ctx1, modified, added, removed, copy, opts):
2236 2307 '''generates tuples (f1, f2, copyop), where f1 is the name of the file
2237 2308 before and f2 is the the name after. For added files, f1 will be None,
2238 2309 and for removed files, f2 will be None. copyop may be set to None, 'copy'
2239 2310 or 'rename' (the latter two only if opts.git is set).'''
2240 2311 gone = set()
2241 2312
2242 2313 copyto = dict([(v, k) for k, v in copy.items()])
2243 2314
2244 2315 addedset, removedset = set(added), set(removed)
2245 2316 # Fix up added, since merged-in additions appear as
2246 2317 # modifications during merges
2247 2318 for f in modified:
2248 2319 if f not in ctx1:
2249 2320 addedset.add(f)
2250 2321
2251 2322 for f in sorted(modified + added + removed):
2252 2323 copyop = None
2253 2324 f1, f2 = f, f
2254 2325 if f in addedset:
2255 2326 f1 = None
2256 2327 if f in copy:
2257 2328 if opts.git:
2258 2329 f1 = copy[f]
2259 2330 if f1 in removedset and f1 not in gone:
2260 2331 copyop = 'rename'
2261 2332 gone.add(f1)
2262 2333 else:
2263 2334 copyop = 'copy'
2264 2335 elif f in removedset:
2265 2336 f2 = None
2266 2337 if opts.git:
2267 2338 # have we already reported a copy above?
2268 2339 if (f in copyto and copyto[f] in addedset
2269 2340 and copy[copyto[f]] == f):
2270 2341 continue
2271 2342 yield f1, f2, copyop
2272 2343
def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
            copy, getfilectx, opts, losedatafn, prefix, relroot):
    '''given input data, generate a diff and yield it in blocks

    If generating a diff would lose data like flags or binary data and
    losedatafn is not None, it will be called.

    relroot is removed and prefix is added to every path in the diff output.

    If relroot is not empty, this function expects every path in modified,
    added, removed and copy to start with it.'''

    # git blob id of the given content (sha1 over "blob <len>\0<data>")
    def gitindex(text):
        if not text:
            text = ""
        l = len(text)
        s = util.sha1('blob %d\0' % l)
        s.update(text)
        return s.hexdigest()

    if opts.noprefix:
        aprefix = bprefix = ''
    else:
        aprefix = 'a/'
        bprefix = 'b/'

    # header line used for non-git diffs: "diff -r REV [-r REV] file"
    def diffline(f, revs):
        revinfo = ' '.join(["-r %s" % rev for rev in revs])
        return 'diff %s %s' % (revinfo, f)

    date1 = util.datestr(ctx1.date())
    date2 = util.datestr(ctx2.date())

    gitmode = {'l': '120000', 'x': '100755', '': '100644'}

    # developer sanity check: every incoming path must live under relroot
    if relroot != '' and (repo.ui.configbool('devel', 'all')
                          or repo.ui.configbool('devel', 'check-relroot')):
        for f in modified + added + removed + copy.keys() + copy.values():
            if f is not None and not f.startswith(relroot):
                raise AssertionError(
                    "file %s doesn't start with relroot %s" % (f, relroot))

    for f1, f2, copyop in _filepairs(
            ctx1, modified, added, removed, copy, opts):
        content1 = None
        content2 = None
        flag1 = None
        flag2 = None
        if f1:
            content1 = getfilectx(f1, ctx1).data()
            if opts.git or losedatafn:
                flag1 = ctx1.flags(f1)
        if f2:
            content2 = getfilectx(f2, ctx2).data()
            if opts.git or losedatafn:
                flag2 = ctx2.flags(f2)
        binary = False
        if opts.git or losedatafn:
            binary = util.binary(content1) or util.binary(content2)

        # a plain (non-git) diff cannot express any of these; give the
        # caller a chance to upgrade to git format or abort
        if losedatafn and not opts.git:
            if (binary or
                # copy/rename
                f2 in copy or
                # empty file creation
                (not f1 and not content2) or
                # empty file deletion
                (not content1 and not f2) or
                # create with flags
                (not f1 and flag2) or
                # change flags
                (f1 and f2 and flag1 != flag2)):
                losedatafn(f2 or f1)

        path1 = f1 or f2
        path2 = f2 or f1
        # strip relroot and prepend the display prefix
        path1 = posixpath.join(prefix, path1[len(relroot):])
        path2 = posixpath.join(prefix, path2[len(relroot):])
        header = []
        if opts.git:
            header.append('diff --git %s%s %s%s' %
                          (aprefix, path1, bprefix, path2))
            if not f1: # added
                header.append('new file mode %s' % gitmode[flag2])
            elif not f2: # removed
                header.append('deleted file mode %s' % gitmode[flag1])
            else: # modified/copied/renamed
                mode1, mode2 = gitmode[flag1], gitmode[flag2]
                if mode1 != mode2:
                    header.append('old mode %s' % mode1)
                    header.append('new mode %s' % mode2)
                if copyop is not None:
                    header.append('%s from %s' % (copyop, path1))
                    header.append('%s to %s' % (copyop, path2))
        elif revs and not repo.ui.quiet:
            header.append(diffline(path1, revs))

        if binary and opts.git and not opts.nobinary:
            # binary content: base85 delta plus an index line with blob ids
            text = mdiff.b85diff(content1, content2)
            if text:
                header.append('index %s..%s' %
                              (gitindex(content1), gitindex(content2)))
        else:
            text = mdiff.unidiff(content1, date1,
                                 content2, date2,
                                 path1, path2, opts=opts)
        # emit the header when there is body text, or when the header alone
        # carries information (more than just the diff line)
        if header and (text or len(header) > 1):
            yield '\n'.join(header) + '\n'
        if text:
            yield text
2383 2454
def diffstatsum(stats):
    """Fold per-file (name, adds, removes, isbinary) tuples into totals.

    Returns (maxfile, maxtotal, addtotal, removetotal, binary): the widest
    filename (display columns), the largest per-file change count, the
    summed additions and removals, and whether any file was binary.
    """
    widest = 0
    biggest = 0
    addsum = 0
    removesum = 0
    anybinary = False
    for filename, adds, removes, isbinary in stats:
        widest = max(widest, encoding.colwidth(filename))
        biggest = max(biggest, adds + removes)
        addsum += adds
        removesum += removes
        anybinary = anybinary or isbinary

    return widest, biggest, addsum, removesum, anybinary
2394 2465
def diffstatdata(lines):
    """Parse diff text into (filename, adds, removes, isbinary) tuples."""
    diffre = re.compile('^diff .*-r [a-z0-9]+\s(.*)$')

    results = []
    fname = None
    plus = minus = 0
    binary = False

    for line in lines:
        if line.startswith('diff'):
            # new file header: flush the previous file's counters
            if fname:
                results.append((fname, plus, minus, binary))
            plus = minus = 0
            binary = False
            if line.startswith('diff --git a/'):
                fname = gitre.search(line).group(2)
            elif line.startswith('diff -r'):
                # format: "diff -r ... -r ... filename"
                fname = diffre.search(line).group(1)
        elif line.startswith('+') and not line.startswith('+++ '):
            plus += 1
        elif line.startswith('-') and not line.startswith('--- '):
            minus += 1
        elif (line.startswith('GIT binary patch') or
              line.startswith('Binary file')):
            binary = True
    if fname:
        results.append((fname, plus, minus, binary))
    return results
2424 2495
def diffstat(lines, width=80, git=False):
    """Render a diffstat summary (one histogram row per file plus a
    totals line) from raw diff ``lines``, fitted to ``width`` columns."""
    output = []
    stats = diffstatdata(lines)
    maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats)

    countwidth = len(str(maxtotal))
    # 'Bin' is three characters wide
    if hasbinary and countwidth < 3:
        countwidth = 3
    # 6 = surrounding punctuation/spaces in the row format below
    graphwidth = width - countwidth - maxname - 6
    if graphwidth < 10:
        graphwidth = 10

    def scale(i):
        if maxtotal <= graphwidth:
            return i
        # If diffstat runs out of room it doesn't print anything,
        # which isn't very useful, so always print at least one + or -
        # if there were at least some changes.
        return max(i * graphwidth // maxtotal, int(bool(i)))

    for filename, adds, removes, isbinary in stats:
        if isbinary:
            count = 'Bin'
        else:
            count = adds + removes
        pluses = '+' * scale(adds)
        minuses = '-' * scale(removes)
        output.append(' %s%s | %*s %s%s\n' %
                      (filename, ' ' * (maxname - encoding.colwidth(filename)),
                       countwidth, count, pluses, minuses))

    if stats:
        output.append(_(' %d files changed, %d insertions(+), '
                        '%d deletions(-)\n')
                      % (len(stats), totaladds, totalremoves))

    return ''.join(output)
2462 2533
def diffstatui(*args, **kw):
    '''like diffstat(), but yields 2-tuples of (output, label) for
    ui.write()
    '''

    for statline in diffstat(*args, **kw).splitlines():
        # histogram rows end in a run of '+'/'-' characters; label those
        # runs, emit everything else unlabeled
        if statline and statline[-1] in '+-':
            rowtext, histogram = statline.rsplit(' ', 1)
            yield (rowtext + ' ', '')
            plusrun = re.search(r'\++', histogram)
            if plusrun:
                yield (plusrun.group(0), 'diffstat.inserted')
            minusrun = re.search(r'-+', histogram)
            if minusrun:
                yield (minusrun.group(0), 'diffstat.deleted')
        else:
            yield (statline, '')
        yield ('\n', '')
@@ -1,322 +1,393 b''
1 1 Revert interactive tests
2 2 1 add and commit file f
3 3 2 add commit file folder1/g
4 4 3 add and commit file folder2/h
5 5 4 add and commit file folder1/i
6 6 5 commit change to file f
7 7 6 commit changes to files folder1/g folder2/h
8 8 7 commit changes to files folder1/g folder2/h
9 9 8 revert interactive to commit id 2 (line 3 above), check that folder1/i is removed and
10 10 9 make workdir match 7
11 11 10 run the same test than 8 from within folder1 and check same expectations
12 12
13 13 $ cat <<EOF >> $HGRCPATH
14 14 > [ui]
15 15 > interactive = true
16 16 > [extensions]
17 17 > record =
18 18 > EOF
19 19
20 20
21 21 $ mkdir -p a/folder1 a/folder2
22 22 $ cd a
23 23 $ hg init
24 24 >>> open('f', 'wb').write("1\n2\n3\n4\n5\n")
25 25 $ hg add f ; hg commit -m "adding f"
26 26 $ cat f > folder1/g ; hg add folder1/g ; hg commit -m "adding folder1/g"
27 27 $ cat f > folder2/h ; hg add folder2/h ; hg commit -m "adding folder2/h"
28 28 $ cat f > folder1/i ; hg add folder1/i ; hg commit -m "adding folder1/i"
29 29 >>> open('f', 'wb').write("a\n1\n2\n3\n4\n5\nb\n")
30 30 $ hg commit -m "modifying f"
31 31 >>> open('folder1/g', 'wb').write("c\n1\n2\n3\n4\n5\nd\n")
32 32 $ hg commit -m "modifying folder1/g"
33 33 >>> open('folder2/h', 'wb').write("e\n1\n2\n3\n4\n5\nf\n")
34 34 $ hg commit -m "modifying folder2/h"
35 35 $ hg tip
36 36 changeset: 6:59dd6e4ab63a
37 37 tag: tip
38 38 user: test
39 39 date: Thu Jan 01 00:00:00 1970 +0000
40 40 summary: modifying folder2/h
41 41
42 42 $ hg revert -i -r 2 --all -- << EOF
43 43 > y
44 44 > y
45 45 > y
46 46 > y
47 47 > y
48 48 > n
49 49 > n
50 50 > EOF
51 51 reverting f
52 52 reverting folder1/g (glob)
53 53 removing folder1/i (glob)
54 54 reverting folder2/h (glob)
55 55 diff --git a/f b/f
56 56 2 hunks, 2 lines changed
57 57 examine changes to 'f'? [Ynesfdaq?] y
58 58
59 59 @@ -1,6 +1,5 @@
60 60 -a
61 61 1
62 62 2
63 63 3
64 64 4
65 65 5
66 66 record change 1/6 to 'f'? [Ynesfdaq?] y
67 67
68 68 @@ -2,6 +1,5 @@
69 69 1
70 70 2
71 71 3
72 72 4
73 73 5
74 74 -b
75 75 record change 2/6 to 'f'? [Ynesfdaq?] y
76 76
77 77 diff --git a/folder1/g b/folder1/g
78 78 2 hunks, 2 lines changed
79 79 examine changes to 'folder1/g'? [Ynesfdaq?] y
80 80
81 81 @@ -1,6 +1,5 @@
82 82 -c
83 83 1
84 84 2
85 85 3
86 86 4
87 87 5
88 88 record change 3/6 to 'folder1/g'? [Ynesfdaq?] y
89 89
90 90 @@ -2,6 +1,5 @@
91 91 1
92 92 2
93 93 3
94 94 4
95 95 5
96 96 -d
97 97 record change 4/6 to 'folder1/g'? [Ynesfdaq?] n
98 98
99 99 diff --git a/folder2/h b/folder2/h
100 100 2 hunks, 2 lines changed
101 101 examine changes to 'folder2/h'? [Ynesfdaq?] n
102 102
103 103 $ cat f
104 104 1
105 105 2
106 106 3
107 107 4
108 108 5
109 109 $ cat folder1/g
110 110 1
111 111 2
112 112 3
113 113 4
114 114 5
115 115 d
116 116 $ cat folder2/h
117 117 e
118 118 1
119 119 2
120 120 3
121 121 4
122 122 5
123 123 f
124 124
125 125 Test that --interactive lift the need for --all
126 126
127 127 $ echo q | hg revert -i -r 2
128 128 reverting folder1/g (glob)
129 129 reverting folder2/h (glob)
130 130 diff --git a/folder1/g b/folder1/g
131 131 1 hunks, 1 lines changed
132 132 examine changes to 'folder1/g'? [Ynesfdaq?] q
133 133
134 134 abort: user quit
135 135 [255]
136 136 $ rm folder1/g.orig
137 137
138 138
139 139 $ hg update -C 6
140 140 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
141 141 $ hg revert -i -r 2 --all -- << EOF
142 142 > y
143 143 > y
144 144 > y
145 145 > y
146 146 > y
147 147 > n
148 148 > n
149 149 > EOF
150 150 reverting f
151 151 reverting folder1/g (glob)
152 152 removing folder1/i (glob)
153 153 reverting folder2/h (glob)
154 154 diff --git a/f b/f
155 155 2 hunks, 2 lines changed
156 156 examine changes to 'f'? [Ynesfdaq?] y
157 157
158 158 @@ -1,6 +1,5 @@
159 159 -a
160 160 1
161 161 2
162 162 3
163 163 4
164 164 5
165 165 record change 1/6 to 'f'? [Ynesfdaq?] y
166 166
167 167 @@ -2,6 +1,5 @@
168 168 1
169 169 2
170 170 3
171 171 4
172 172 5
173 173 -b
174 174 record change 2/6 to 'f'? [Ynesfdaq?] y
175 175
176 176 diff --git a/folder1/g b/folder1/g
177 177 2 hunks, 2 lines changed
178 178 examine changes to 'folder1/g'? [Ynesfdaq?] y
179 179
180 180 @@ -1,6 +1,5 @@
181 181 -c
182 182 1
183 183 2
184 184 3
185 185 4
186 186 5
187 187 record change 3/6 to 'folder1/g'? [Ynesfdaq?] y
188 188
189 189 @@ -2,6 +1,5 @@
190 190 1
191 191 2
192 192 3
193 193 4
194 194 5
195 195 -d
196 196 record change 4/6 to 'folder1/g'? [Ynesfdaq?] n
197 197
198 198 diff --git a/folder2/h b/folder2/h
199 199 2 hunks, 2 lines changed
200 200 examine changes to 'folder2/h'? [Ynesfdaq?] n
201 201
202 202 $ cat f
203 203 1
204 204 2
205 205 3
206 206 4
207 207 5
208 208 $ cat folder1/g
209 209 1
210 210 2
211 211 3
212 212 4
213 213 5
214 214 d
215 215 $ cat folder2/h
216 216 e
217 217 1
218 218 2
219 219 3
220 220 4
221 221 5
222 222 f
223 223 $ hg st
224 224 M f
225 225 M folder1/g
226 226 R folder1/i
227 227 $ hg revert --interactive f << EOF
228 228 > y
229 229 > y
230 230 > n
231 231 > n
232 232 > EOF
233 233 diff --git a/f b/f
234 234 2 hunks, 2 lines changed
235 235 examine changes to 'f'? [Ynesfdaq?] y
236 236
237 237 @@ -1,5 +1,6 @@
238 238 +a
239 239 1
240 240 2
241 241 3
242 242 4
243 243 5
244 244 record change 1/2 to 'f'? [Ynesfdaq?] y
245 245
246 246 @@ -1,5 +2,6 @@
247 247 1
248 248 2
249 249 3
250 250 4
251 251 5
252 252 +b
253 253 record change 2/2 to 'f'? [Ynesfdaq?] n
254 254
255 255 $ hg st
256 256 M f
257 257 M folder1/g
258 258 R folder1/i
259 259 ? f.orig
260 260 $ cat f
261 261 a
262 262 1
263 263 2
264 264 3
265 265 4
266 266 5
267 267 $ cat f.orig
268 268 1
269 269 2
270 270 3
271 271 4
272 272 5
273 273 $ rm f.orig
274 274 $ hg update -C .
275 275 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
276 276
277 277 Check editing files newly added by a revert
278 278
279 279 1) Create a dummy editor changing 1 to 42
280 280 $ cat > $TESTTMP/editor.sh << '__EOF__'
281 281 > cat "$1" | sed "s/1/42/g" > tt
282 282 > mv tt "$1"
283 283 > __EOF__
284 284
285 285 2) Remove f
286 286 $ hg rm f
287 287 $ hg commit -m "remove f"
288 288
289 289 3) Do another commit on top
290 290 $ touch k; hg add k
291 291 $ hg commit -m "add k"
292 292 $ hg st
293 293
294 294 4) Use interactive revert to recover f and change it on the fly
295 295 $ HGEDITOR="\"sh\" \"${TESTTMP}/editor.sh\"" hg revert -i -r ".^^" <<EOF
296 296 > y
297 297 > e
298 298 > EOF
299 299 adding f
300 300 removing k
301 301 diff --git a/f b/f
302 302 new file mode 100644
303 303 examine changes to 'f'? [Ynesfdaq?] y
304 304
305 305 @@ -0,0 +1,7 @@
306 306 +a
307 307 +1
308 308 +2
309 309 +3
310 310 +4
311 311 +5
312 312 +b
313 313 record this change to 'f'? [Ynesfdaq?] e
314 314
315 315 $ cat f
316 316 a
317 317 42
318 318 2
319 319 3
320 320 4
321 321 5
322 322 b
323
324 Check the experimental config to invert the selection:
325 $ cat <<EOF >> $HGRCPATH
326 > [experimental]
327 > revertalternateinteractivemode=True
328 > EOF
329
330
331 $ hg up -C .
332 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
333 $ printf 'firstline\nc\n1\n2\n3\n 3\n5\nd\nlastline\n' > folder1/g
334 $ hg diff --nodates
335 diff -r 5a858e056dc0 folder1/g
336 --- a/folder1/g
337 +++ b/folder1/g
338 @@ -1,7 +1,9 @@
339 +firstline
340 c
341 1
342 2
343 3
344 -4
345 + 3
346 5
347 d
348 +lastline
349 $ hg revert -i <<EOF
350 > y
351 > y
352 > y
353 > n
354 > EOF
355 reverting folder1/g (glob)
356 diff --git a/folder1/g b/folder1/g
357 3 hunks, 3 lines changed
358 examine changes to 'folder1/g'? [Ynesfdaq?] y
359
360 @@ -1,4 +1,5 @@
361 +firstline
362 c
363 1
364 2
365 3
366 record change 1/3 to 'folder1/g'? [Ynesfdaq?] y
367
368 @@ -1,7 +2,7 @@
369 c
370 1
371 2
372 3
373 -4
374 + 3
375 5
376 d
377 record change 2/3 to 'folder1/g'? [Ynesfdaq?] y
378
379 @@ -6,2 +7,3 @@
380 5
381 d
382 +lastline
383 record change 3/3 to 'folder1/g'? [Ynesfdaq?] n
384
385 $ hg diff --nodates
386 diff -r 5a858e056dc0 folder1/g
387 --- a/folder1/g
388 +++ b/folder1/g
389 @@ -5,3 +5,4 @@
390 4
391 5
392 d
393 +lastline
General Comments 0
You need to be logged in to leave comments. Login now