##// END OF EJS Templates
patch: move 'extract' return to a dictionary...
Pierre-Yves David -
r26547:b9be8ab6 default
parent child Browse files
Show More
@@ -1,3398 +1,3405 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO, shutil
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import formatter
18 18 import crecord as crecordmod
19 19 import lock as lockmod
20 20
def ishunk(x):
    """Return True if x is a record hunk (curses or plain variety)."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
24 24
def newandmodified(chunks, originalchunks):
    """Return the set of filenames whose hunks introduce a new file and
    were not present in 'originalchunks'."""
    return set(c.header.filename()
               for c in chunks
               if ishunk(c)
               and c.header.isnewfile()
               and c not in originalchunks)
32 32
def parsealiases(cmd):
    """Split a command-table key into its list of aliases.

    A leading "^" (the "show in short help" marker) is discarded.
    """
    stripped = cmd.lstrip("^")
    return stripped.split("|")
35 35
def setupwrapcolorwrite(ui):
    # wrap ui.write so diff output can be labeled/colorized
    def wrapwrite(orig, *args, **kw):
        # pop 'label' so it is not forwarded twice; difflabel yields
        # (chunk, extra-label) pairs for every piece of diff output
        label = kw.pop('label', '')
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    oldwrite = ui.write
    def wrap(*args, **kwargs):
        return wrapwrite(oldwrite, *args, **kwargs)
    # monkey-patch ui.write; the caller must restore the returned
    # original write method when done
    setattr(ui, 'write', wrap)
    return oldwrite
48 48
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Filter 'originalhunks' interactively, via curses when requested.

    With 'testfile' set, the curses chunk selector is driven by the
    scripted test decorator instead of a live user.
    """
    if not usecurses:
        return patch.filterpatch(ui, originalhunks, operation)

    if testfile:
        recordfn = crecordmod.testdecorator(testfile,
                                            crecordmod.testchunkselector)
    else:
        recordfn = crecordmod.chunkselector

    return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
61 61
def recordfilter(ui, originalhunks, operation=None):
    """ Prompts the user to filter the originalhunks and return a list of
    selected hunks.
    *operation* is used for ui purposes to indicate the user
    what kind of filtering they are doing: reverting, committing, shelving, etc.
    *operation* has to be a translated string.
    """
    usecurses = ui.configbool('experimental', 'crecord', False)
    testfile = ui.config('experimental', 'crecordtest', None)
    # label/colorize diff output while the interactive session runs
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks = filterchunks(ui, originalhunks, usecurses, testfile,
                                 operation)
    finally:
        # always restore the original ui.write, even on error
        ui.write = oldwrite
    return newchunks
78 78
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Interactively select changes (via *filterfn*), apply only the
    selected hunks to the working directory, delegate the commit to
    *commitfunc*, then restore the unselected changes."""
    import merge as mergemod

    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise util.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        merge = len(repo[None].parents()) > 1
        if merge:
            raise util.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        status = repo.status(match=match)
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, so we have intending-to apply subset of it
        try:
            chunks = filterfn(ui, originalchunks)
        except patch.PatchError as err:
            raise util.Abort(_('error parsing patch: %s') % err)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # headers expose files(); raw hunks do not
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
               newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                # an existing backup directory is fine to reuse
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                shutil.copystat(repo.wjoin(f), tmpname)
                backups[f] = tmpname

            fp = cStringIO.StringIO()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                # Equivalent to hg.revert
                choices = lambda key: key in backups
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, choices)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError as err:
                    raise util.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    util.copyfile(tmpname, repo.wjoin(realname))
                    # Our calls to copystat() here and above are a
                    # hack to trick any editors that have f open that
                    # we haven't modified them.
                    #
                    # Also note that this racy as an editor could
                    # notice the file's mtime before we've finished
                    # writing it.
                    shutil.copystat(tmpname, repo.wjoin(realname))
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best-effort cleanup; a missing backup file is not fatal
                pass

    def recordinwlock(ui, repo, message, match, opts):
        # hold the working-directory lock for the whole record operation
        wlock = repo.wlock()
        try:
            return recordfunc(ui, repo, message, match, opts)
        finally:
            wlock.release()

    return commit(ui, repo, recordinwlock, pats, opts)
242 242
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    # an exact table key short-circuits: "log" alias beats "^log|history"
    keys = [cmd] if cmd in table else table.keys()

    allcmds = []
    for entry in keys:
        aliases = entry.lstrip("^").split("|")
        allcmds.extend(aliases)
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            # prefix match: first alias that starts with cmd wins
            prefixed = [a for a in aliases if a.startswith(cmd)]
            if prefixed:
                found = prefixed[0]
        if found is None:
            continue
        if aliases[0].startswith("debug") or found.startswith("debug"):
            debugchoice[found] = (aliases, table[entry])
        else:
            choice[found] = (aliases, table[entry])

    # only fall back to debug commands when nothing else matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
280 280
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    # exact (or unique-alias) hit wins outright
    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))

    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
297 297
def findrepo(p):
    """Walk upward from directory 'p' looking for a '.hg' directory.

    Return the path containing '.hg', or None when the filesystem root
    is reached without finding one.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            return None
        p = parent
    return p
305 305
def bailifchanged(repo, merge=True):
    """Abort when the working directory has uncommitted changes.

    With merge=True (the default), an outstanding uncommitted merge also
    aborts. Subrepositories are checked recursively.
    """
    if merge and repo.dirstate.p2() != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    st = repo.status()
    if any(st[:4]):
        # any of modified/added/removed/deleted is non-empty
        raise util.Abort(_('uncommitted changes'))
    ctx = repo[None]
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged()
315 315
def logmessage(ui, opts):
    """get the log message according to -m and -l option"""
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if logfile and not message:
        try:
            if logfile == '-':
                # read the message from stdin
                message = ui.fin.read()
            else:
                # normalize line endings while reading the file
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message
334 334
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # a ctx with two parents is a merge commit
        ismerge = len(ctxorbool.parents()) > 1
    return baseformname + (".merge" if ismerge else ".normal")
351 351
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is a extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        def forceeditor(r, c, s):
            return commitforceeditor(r, c, s,
                                     finishdesc=finishdesc,
                                     extramsg=extramsg,
                                     editform=editform)
        return forceeditor
    if editform:
        def formeditor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return formeditor
    return commiteditor
382 382
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if not limit:
        # no limit requested
        return None
    try:
        limit = int(limit)
    except ValueError:
        raise util.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise util.Abort(_('limit must be positive'))
    return limit
396 396
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand %-escapes in output-filename pattern 'pat'.

    Supported escapes: %% literal percent, %b repo basename, and — when
    the corresponding argument is given — %H/%h/%R/%r/%m (node-derived),
    %N total, %n zero-padded sequence number, %s/%d/%p (pathname-derived).
    An unknown escape aborts.
    """
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }

    try:
        if node:
            expander.update({
                'H': lambda: hex(node),
                'R': lambda: str(repo.changelog.rev(node)),
                'h': lambda: short(node),
                'm': lambda: re.sub(r'[^\w]', '_', str(desc)),
                'r': lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0),
            })
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # pad the sequence number to the width of the total
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        pieces = []
        i = 0
        while i < len(pat):
            ch = pat[i]
            if ch == '%':
                i += 1
                ch = expander[pat[i]]()
            pieces.append(ch)
            i += 1
        return ''.join(pieces)
    except KeyError as inst:
        raise util.Abort(_("invalid format spec '%%%s' in output filename") %
                         inst.args[0])
442 442
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    # return an open file-like object for the expanded output pattern
    # 'pat'; an empty pat or '-' maps to the ui's stdout (when writable)
    # or stdin

    writable = mode not in ('r', 'rb')

    if not pat or pat == '-':
        if writable:
            fp = repo.ui.fout
        else:
            fp = repo.ui.fin
        if util.safehasattr(fp, 'fileno'):
            # duplicate the descriptor so closing the returned object
            # does not close the ui's stream
            return os.fdopen(os.dup(fp.fileno()), mode)
        else:
            # if this fp can't be duped properly, return
            # a dummy object that can be closed
            class wrappedfileobj(object):
                noop = lambda x: None
                def __init__(self, f):
                    self.f = f
                def __getattr__(self, attr):
                    if attr == 'close':
                        return self.noop
                    else:
                        return getattr(self.f, attr)

            return wrappedfileobj(fp)
    # 'pat' may itself already be an open file-like object
    if util.safehasattr(pat, 'write') and writable:
        return pat
    if util.safehasattr(pat, 'read') and 'r' in mode:
        return pat
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        # first open of a name truncates; later opens of the same
        # expanded name append
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            modemap[fn] = 'ab'
    return open(fn, mode)
480 480
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    # reject mutually-exclusive / incomplete option combinations first
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise util.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise util.Abort(_("--dir can only be used on repos with "
                                   "treemanifest enabled"))
            dirlog = repo.dirlog(file_)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifest
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise util.Abort(_("revlog '%s' not found") % file_)
        # fall back to opening the revlog file directly from disk,
        # mapping 'name.d'/'name.i' to the index file 'name.i'
        r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
525 525
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, move) files matching 'pats' to the
    last pattern, which names the destination. Returns True when any
    individual copy failed."""
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # expand 'pat' into a list of (abs, rel, exact) source files,
        # warning about unmanaged or removed files that were named exactly
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(repo[None], [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                # case-only rename on a case-insensitive filesystem
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # rename via a temporary name so only the case changes
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                util.unlinkpath(repo.wjoin(abssrc))
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many candidate targets already exist for
                    # this strip length
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest,
                                         util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest,
                                              os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise util.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise util.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
753 753
def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
            runargs=None, appendpid=False):
    '''Run a command as a service.'''

    def writepid(pid):
        # record the pid in opts['pid_file'] (append when appendpid is set)
        if opts['pid_file']:
            if appendpid:
                mode = 'a'
            else:
                mode = 'w'
            fp = open(opts['pid_file'], mode)
            fp.write(str(pid) + '\n')
            fp.close()

    if opts['daemon'] and not opts['daemon_pipefds']:
        # Signal child process startup with file removal
        lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
        os.close(lockfd)
        try:
            if not runargs:
                runargs = util.hgcmd() + sys.argv[1:]
            runargs.append('--daemon-pipefds=%s' % lockpath)
            # Don't pass --cwd to the child process, because we've already
            # changed directory.
            for i in xrange(1, len(runargs)):
                if runargs[i].startswith('--cwd='):
                    del runargs[i]
                    break
                elif runargs[i].startswith('--cwd'):
                    del runargs[i:i + 2]
                    break
            def condfn():
                # child signals readiness by deleting the lock file
                return not os.path.exists(lockpath)
            pid = util.rundetached(runargs, condfn)
            if pid < 0:
                raise util.Abort(_('child process failed to start'))
            writepid(pid)
        finally:
            try:
                os.unlink(lockpath)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
        if parentfn:
            return parentfn(pid)
        else:
            return

    if initfn:
        initfn()

    if not opts['daemon']:
        writepid(os.getpid())

    if opts['daemon_pipefds']:
        # we are the detached child: detach from the terminal and tell
        # the parent we are ready by removing the lock file
        lockpath = opts['daemon_pipefds']
        try:
            os.setsid()
        except AttributeError:
            # NOTE(review): os.setsid is presumably missing on this
            # platform (e.g. Windows); skipping looks intentional
            pass
        os.unlink(lockpath)
        util.hidewindow()
        sys.stdout.flush()
        sys.stderr.flush()

        # redirect stdin to /dev/null and stdout/stderr to the logfile
        # (or /dev/null when no logfile is given)
        nullfd = os.open(os.devnull, os.O_RDWR)
        logfilefd = nullfd
        if logfile:
            logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
        os.dup2(nullfd, 0)
        os.dup2(logfilefd, 1)
        os.dup2(logfilefd, 2)
        if nullfd not in (0, 1, 2):
            os.close(nullfd)
        if logfile and logfilefd not in (0, 1, 2):
            os.close(logfilefd)

    if runfn:
        return runfn()
833 833
834 834 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
835 835 """Utility function used by commands.import to import a single patch
836 836
837 837 This function is explicitly defined here to help the evolve extension to
838 838 wrap this part of the import logic.
839 839
840 840 The API is currently a bit ugly because it a simple code translation from
841 841 the import command. Feel free to make it better.
842 842
843 843 :hunk: a patch (as a binary string)
844 844 :parents: nodes that will be parent of the created commit
845 845 :opts: the full dict of option passed to the import command
846 846 :msgs: list to save commit message to.
847 847 (used in case we need to save it when failing)
848 848 :updatefunc: a function that update a repo to a given node
849 849 updatefunc(<repo>, <node>)
850 850 """
851 851 # avoid cycle context -> subrepo -> cmdutil
852 852 import context
853 tmpname, message, user, date, branch, nodeid, p1, p2 = \
854 patch.extract(ui, hunk)
853 extractdata = patch.extract(ui, hunk)
854 tmpname = extractdata.get('filename')
855 message = extractdata.get('message')
856 user = extractdata.get('user')
857 date = extractdata.get('date')
858 branch = extractdata.get('branch')
859 nodeid = extractdata.get('nodeid')
860 p1 = extractdata.get('p1')
861 p2 = extractdata.get('p2')
855 862
856 863 update = not opts.get('bypass')
857 864 strip = opts["strip"]
858 865 prefix = opts["prefix"]
859 866 sim = float(opts.get('similarity') or 0)
860 867 if not tmpname:
861 868 return (None, None, False)
862 869 msg = _('applied to working directory')
863 870
864 871 rejects = False
865 872 dsguard = None
866 873
867 874 try:
868 875 cmdline_message = logmessage(ui, opts)
869 876 if cmdline_message:
870 877 # pickup the cmdline msg
871 878 message = cmdline_message
872 879 elif message:
873 880 # pickup the patch msg
874 881 message = message.strip()
875 882 else:
876 883 # launch the editor
877 884 message = None
878 885 ui.debug('message:\n%s\n' % message)
879 886
880 887 if len(parents) == 1:
881 888 parents.append(repo[nullid])
882 889 if opts.get('exact'):
883 890 if not nodeid or not p1:
884 891 raise util.Abort(_('not a Mercurial patch'))
885 892 p1 = repo[p1]
886 893 p2 = repo[p2 or nullid]
887 894 elif p2:
888 895 try:
889 896 p1 = repo[p1]
890 897 p2 = repo[p2]
891 898 # Without any options, consider p2 only if the
892 899 # patch is being applied on top of the recorded
893 900 # first parent.
894 901 if p1 != parents[0]:
895 902 p1 = parents[0]
896 903 p2 = repo[nullid]
897 904 except error.RepoError:
898 905 p1, p2 = parents
899 906 if p2.node() == nullid:
900 907 ui.warn(_("warning: import the patch as a normal revision\n"
901 908 "(use --exact to import the patch as a merge)\n"))
902 909 else:
903 910 p1, p2 = parents
904 911
905 912 n = None
906 913 if update:
907 914 dsguard = dirstateguard(repo, 'tryimportone')
908 915 if p1 != parents[0]:
909 916 updatefunc(repo, p1.node())
910 917 if p2 != parents[1]:
911 918 repo.setparents(p1.node(), p2.node())
912 919
913 920 if opts.get('exact') or opts.get('import_branch'):
914 921 repo.dirstate.setbranch(branch or 'default')
915 922
916 923 partial = opts.get('partial', False)
917 924 files = set()
918 925 try:
919 926 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
920 927 files=files, eolmode=None, similarity=sim / 100.0)
921 928 except patch.PatchError as e:
922 929 if not partial:
923 930 raise util.Abort(str(e))
924 931 if partial:
925 932 rejects = True
926 933
927 934 files = list(files)
928 935 if opts.get('no_commit'):
929 936 if message:
930 937 msgs.append(message)
931 938 else:
932 939 if opts.get('exact') or p2:
933 940 # If you got here, you either use --force and know what
934 941 # you are doing or used --exact or a merge patch while
935 942 # being updated to its first parent.
936 943 m = None
937 944 else:
938 945 m = scmutil.matchfiles(repo, files or [])
939 946 editform = mergeeditform(repo[None], 'import.normal')
940 947 if opts.get('exact'):
941 948 editor = None
942 949 else:
943 950 editor = getcommiteditor(editform=editform, **opts)
944 951 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
945 952 try:
946 953 if partial:
947 954 repo.ui.setconfig('ui', 'allowemptycommit', True)
948 955 n = repo.commit(message, opts.get('user') or user,
949 956 opts.get('date') or date, match=m,
950 957 editor=editor)
951 958 finally:
952 959 repo.ui.restoreconfig(allowemptyback)
953 960 dsguard.close()
954 961 else:
955 962 if opts.get('exact') or opts.get('import_branch'):
956 963 branch = branch or 'default'
957 964 else:
958 965 branch = p1.branch()
959 966 store = patch.filestore()
960 967 try:
961 968 files = set()
962 969 try:
963 970 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
964 971 files, eolmode=None)
965 972 except patch.PatchError as e:
966 973 raise util.Abort(str(e))
967 974 if opts.get('exact'):
968 975 editor = None
969 976 else:
970 977 editor = getcommiteditor(editform='import.bypass')
971 978 memctx = context.makememctx(repo, (p1.node(), p2.node()),
972 979 message,
973 980 opts.get('user') or user,
974 981 opts.get('date') or date,
975 982 branch, files, store,
976 983 editor=editor)
977 984 n = memctx.commit()
978 985 finally:
979 986 store.close()
980 987 if opts.get('exact') and opts.get('no_commit'):
981 988 # --exact with --no-commit is still useful in that it does merge
982 989 # and branch bits
983 990 ui.warn(_("warning: can't check exact import with --no-commit\n"))
984 991 elif opts.get('exact') and hex(n) != nodeid:
985 992 raise util.Abort(_('patch is damaged or loses information'))
986 993 if n:
987 994 # i18n: refers to a short changeset id
988 995 msg = _('created %s') % short(n)
989 996 return (msg, n, rejects)
990 997 finally:
991 998 lockmod.release(dsguard)
992 999 os.unlink(tmpname)
993 1000
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# the function has to return a string to be added to the header or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1001 1008
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches.

    Writes one patch per revision in revs.  When fp is None and template
    is non-empty, each patch goes to a file named by expanding template
    (see makefileobj); otherwise patches are written to fp or the ui.
    switch_parent diffs against the second parent instead of the first;
    match restricts the files shown in the diff; opts are diff options.
    '''

    total = len(revs)
    revwidth = max([len(str(rev)) for rev in revs])
    filemode = {}

    def single(rev, seqno, fp):
        # emit the header and diff for one revision (seqno is 1-based)
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()

        if parents:
            prev = parents[0]
        else:
            prev = nullid

        shouldclose = False
        if not fp and len(template) > 0:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fp = makefileobj(repo, template, node, desc=desc, total=total,
                             seqno=seqno, revwidth=revwidth, mode='wb',
                             modemap=filemode)
            if fp != template:
                # only close files we opened ourselves here
                shouldclose = True
        if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        if not fp:
            write = repo.ui.write
        else:
            def write(s, **kw):
                fp.write(s)

        write("# HG changeset patch\n")
        write("# User %s\n" % ctx.user())
        write("# Date %d %d\n" % ctx.date())
        write("# %s\n" % util.datestr(ctx.date()))
        if branch and branch != 'default':
            write("# Branch %s\n" % branch)
        write("# Node ID %s\n" % hex(node))
        write("# Parent  %s\n" % hex(prev))
        if len(parents) > 1:
            write("# Parent  %s\n" % hex(parents[1]))

        # extension-provided extra header lines (see extraexport above)
        for headerid in extraexport:
            header = extraexportmap[headerid](seqno, ctx)
            if header is not None:
                write('# %s\n' % header)
        write(ctx.description().rstrip())
        write("\n\n")

        for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
            write(chunk, label=label)

        if shouldclose:
            fp.close()

    for seqno, rev in enumerate(revs):
        single(rev, seqno + 1, fp)
1067 1074
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False):
    '''show diff or diffstat.

    stat=True emits a diffstat instead of the full diff; fp, when given,
    receives the output instead of the ui.  root restricts the diff to a
    subdirectory; listsubrepos recurses into subrepositories.
    '''
    # route output either to the ui or to the given file object
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            fp.write(s)

    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        # diffstat does not need any context lines
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix, relroot=relroot)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width,
                                             git=diffopts.git):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix,
                                         relroot=relroot):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.narrowmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1125 1132
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        # per-rev buffered output, flushed later by flush()
        self.header = {}
        self.hunk = {}
        self.lastheader = None
        self.footer = None

    def flush(self, ctx):
        '''write out buffered header/hunk for ctx; returns 1 if a hunk
        was written, 0 otherwise'''
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                # only repeat the header when it actually changed
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        '''display ctx, buffering the output per-rev when buffered'''
        if self.buffered:
            self.ui.pushbuffer()
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()
        if self.ui.debugflag:
            hexfunc = hex
        else:
            hexfunc = short
        # as of now, wctx.node() and wctx.rev() return None, but we want to
        # show the same values as {node} and {rev} templatekw
        revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % revnode, label='log.node')
            return

        date = util.datestr(ctx.date())

        # i18n: column positioning for "hg log"
        self.ui.write(_("changeset:   %d:%s\n") % revnode,
                      label='log.changeset changeset.%s' % ctx.phasestr())

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            # i18n: column positioning for "hg log"
            self.ui.write(_("branch:      %s\n") % branch,
                          label='log.branch')

        for name, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if name == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            # i18n: column positioning for "hg log"
            self.ui.write(_("phase:       %s\n") % ctx.phasestr(),
                          label='log.phase')
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            # i18n: column positioning for "hg log"
            self.ui.write(_("parent:      %d:%s\n")
                          % (pctx.rev(), hexfunc(pctx.node())),
                          label=label)

        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            # i18n: column positioning for "hg log"
            self.ui.write(_("manifest:    %d:%s\n") %
                          (self.repo.manifest.rev(mnode), hex(mnode)),
                          label='ui.debug log.manifest')
        # i18n: column positioning for "hg log"
        self.ui.write(_("user:        %s\n") % ctx.user(),
                      label='log.user')
        # i18n: column positioning for "hg log"
        self.ui.write(_("date:        %s\n") % date,
                      label='log.date')

        if self.ui.debugflag:
            # debug mode: list modified, added and removed files separately
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip([# i18n: column positioning for "hg log"
                                   _("files:"),
                                   # i18n: column positioning for "hg log"
                                   _("files+:"),
                                   # i18n: column positioning for "hg log"
                                   _("files-:")], files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            # i18n: column positioning for "hg log"
            self.ui.write(_("files:       %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            # i18n: column positioning for "hg log"
            self.ui.write(_("copies:      %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                # i18n: column positioning for "hg log"
                self.ui.write(_("extra:       %s=%s\n")
                              % (key, value.encode('string_escape')),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # i18n: column positioning for "hg log"
                self.ui.write(_("summary:     %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(changenode, matchfn)

    def showpatch(self, node, matchfn):
        '''write the diff and/or diffstat for node when requested by opts'''
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            prev = self.repo.changelog.parents(node)[0]
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                if stat:
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
            self.ui.write("\n")
1294 1301
class jsonchangeset(changeset_printer):
    '''format changeset information as a JSON list of objects.

    JSON text is built by hand (not via a json library); _first tracks
    whether the opening "[" and inter-object commas have been emitted.
    '''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        self._first = True

    def close(self):
        # terminate the JSON array; emit "[]" if nothing was shown
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        if rev is None:
            # working directory context: no rev/node to report
            jrev = jnode = 'null'
        else:
            jrev = str(rev)
            jnode = '"%s"' % hex(ctx.node())
        j = encoding.jsonescape

        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            self.ui.write(",\n {")

        if self.ui.quiet:
            self.ui.write('\n  "rev": %s' % jrev)
            self.ui.write(',\n  "node": %s' % jnode)
            self.ui.write('\n }')
            return

        self.ui.write('\n  "rev": %s' % jrev)
        self.ui.write(',\n  "node": %s' % jnode)
        self.ui.write(',\n  "branch": "%s"' % j(ctx.branch()))
        self.ui.write(',\n  "phase": "%s"' % ctx.phasestr())
        self.ui.write(',\n  "user": "%s"' % j(ctx.user()))
        self.ui.write(',\n  "date": [%d, %d]' % ctx.date())
        self.ui.write(',\n  "desc": "%s"' % j(ctx.description()))

        self.ui.write(',\n  "bookmarks": [%s]' %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write(',\n  "tags": [%s]' %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write(',\n  "parents": [%s]' %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            self.ui.write(',\n  "manifest": %s' % jmanifestnode)

            self.ui.write(',\n  "extra": {%s}' %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            files = ctx.p1().status(ctx)
            self.ui.write(',\n  "modified": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write(',\n  "added": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write(',\n  "removed": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            self.ui.write(',\n  "files": [%s]' %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                self.ui.write(',\n  "copies": {%s}' %
                              ", ".join('"%s": "%s"' % (j(k), j(v))
                                        for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write(',\n  "diffstat": "%s"' % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write(',\n  "diff": "%s"' % j(self.ui.popbuffer()))

        self.ui.write("\n }")
1392 1399
class changeset_templater(changeset_printer):
    '''format changeset information with a user-supplied template/style.'''

    def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        # in debug mode show full node hashes, otherwise the short form
        formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
        defaulttempl = {
            'parent': '{rev}:{node|formatnode} ',
            'manifest': '{rev}:{node|formatnode}',
            'file_copy': '{name} ({source})',
            'extra': '{key}={value|stringescape}'
            }
        # filecopy is preserved for compatibility reasons
        defaulttempl['filecopy'] = defaulttempl['file_copy']
        self.t = templater.templater(mapfile, {'formatnode': formatnode},
                                     cache=defaulttempl)
        if tmpl:
            self.t.cache['changeset'] = tmpl

        self.cache = {}

        # find correct templates for current mode; later entries override
        # earlier ones when the corresponding *_<postfix> template exists
        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
                       'docheader': '', 'docfooter': ''}
        for mode, postfix in tmplmodes:
            for t in self._parts:
                cur = t
                if postfix:
                    cur += "_" + postfix
                if mode and cur in self.t:
                    self._parts[t] = cur

        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        props = props.copy()
        props.update(templatekw.keywords)
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache

        try:
            # write header
            if self._parts['header']:
                h = templater.stringify(self.t(self._parts['header'], **props))
                if self.buffered:
                    self.header[ctx.rev()] = h
                else:
                    if self.lastheader != h:
                        self.lastheader = h
                        self.ui.write(h)

            # write changeset metadata, then patch if requested
            key = self._parts['changeset']
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(ctx.node(), matchfn)

            if self._parts['footer']:
                if not self.footer:
                    self.footer = templater.stringify(
                        self.t(self._parts['footer'], **props))
        except KeyError as inst:
            msg = _("%s: no key named '%s'")
            raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
        except SyntaxError as inst:
            raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1477 1484
def gettemplate(ui, tmpl, style):
    """
    Find the template matching the given template spec or style.

    Returns a (template, mapfile) pair; at most one element is set.
    """

    if not tmpl and not style:
        # nothing given explicitly: consult the ui configuration, where
        # a configured template is stronger than a configured style
        configured = ui.config('ui', 'logtemplate')
        if configured:
            try:
                configured = templater.unquotestring(configured)
            except SyntaxError:
                pass
            return configured, None
        style = util.expandpath(ui.config('ui', 'style', ''))

    if not tmpl and style:
        stylefile = style
        # bare style names resolve to shipped map-cmdline.* files
        if not os.path.split(stylefile)[0]:
            located = (templater.templatepath('map-cmdline.' + stylefile)
                       or templater.templatepath(stylefile))
            if located:
                stylefile = located
        return None, stylefile

    if not tmpl:
        return None, None

    return formatter.lookuptemplate(ui, 'changeset', tmpl)
1508 1515
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # a file matcher is only needed when --patch/--stat output is requested
    matchfn = None
    if opts.get('patch') or opts.get('stat'):
        matchfn = scmutil.matchall(repo)

    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, matchfn, opts, buffered)

    tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))

    if not (tmpl or mapfile):
        return changeset_printer(ui, repo, matchfn, opts, buffered)

    try:
        return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
                                   buffered)
    except SyntaxError as inst:
        raise util.Abort(inst.args[0])
1539 1546
def showmarker(ui, marker):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    ui.write(hex(marker.precnode()))
    for succ in marker.succnodes():
        ui.write(' ' + hex(succ))
    ui.write(' %X ' % marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
    ui.write('(%s) ' % util.datestr(marker.date()))
    # metadata is shown sorted by key, minus the date (printed above)
    meta = sorted(marker.metadata().items())
    pairs = ['%r: %r' % item for item in meta if item[0] != 'date']
    ui.write('{%s}' % ', '.join(pairs))
    ui.write('\n')
1557 1564
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    datematch = util.matchdate(date)
    matcher = scmutil.matchall(repo)
    found = {}

    def prep(ctx, fns):
        # record every revision whose commit time satisfies the date spec
        when = ctx.date()
        if datematch(when[0]):
            found[ctx.rev()] = when

    for ctx in walkchangerevs(repo, matcher, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in found:
            ui.status(_("found revision %s from %s\n")
                      % (rev, util.datestr(found[rev])))
            return str(rev)

    raise util.Abort(_("revision matching date not found"))
1578 1585
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes, doubling each time until sizelimit is reached,
    then repeating the last size forever."""
    size = windowsize
    while size < sizelimit:
        yield size
        size *= 2
    while True:
        yield size
1584 1591
class FileWalkError(Exception):
    '''Raised by walkfilerevs() when the file history cannot be walked
    using filelogs alone, signalling callers to fall back to the slow
    changelog-scanning path.'''
    pass
1587 1594
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns.  Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yield (filename, filenode) pairs for the matched files, plus
        # any rename sources discovered while walking (copies)
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise util.Abort(_('cannot follow file not in parent '
                                       'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise util.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = set([filelog.linkrev(last)])

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
1684 1691
class _followfilter(object):
    '''Stateful predicate selecting revisions related to the first rev
    it is fed (descendants when walking forward, ancestors backwards).

    match() must be called with monotonically in- or decreasing revs;
    self.roots tracks the frontier of related revisions.'''

    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        # onlyfirst: follow only first parents (--follow-first)
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            # first call establishes the reference revision
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
1722 1729
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns.  Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                # NOTE(review): this relies on wanted supporting subtraction
                # with a list (smartset-style API); a plain builtin set here
                # would raise TypeError -- confirm against the revset types
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
1860 1867
def _makefollowlogfilematcher(repo, files, followfirst):
    """Return a callable mapping a revision number to a match object
    covering the ancestors of *files* at that revision.

    When displaying a revision with --patch --follow FILE, we have
    to know which file of the revision must be diffed. With
    --follow, we want the names of the ancestors of FILE in the
    revision, stored in "fcache". "fcache" is populated by
    reproducing the graph traversal already done by --follow revset
    and relating linkrevs to file names (which is not "correct" but
    good enough).
    """
    # maps linkrev -> set of file paths live at that revision
    fcache = {}
    # one-element list so the nested closure can mutate the flag
    fcacheready = [False]
    pctx = repo['.']

    def populate():
        # Walk each file and its ancestor file revisions, recording the
        # changelog revision (linkrev) each file revision belongs to.
        for fn in files:
            for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
                for c in i:
                    fcache.setdefault(c.linkrev(), set()).add(c.path())

    def filematcher(rev):
        if not fcacheready[0]:
            # Lazy initialization
            fcacheready[0] = True
            populate()
        return scmutil.matchfiles(repo, fcache.get(rev, []))

    return filematcher
1887 1894
1888 1895 def _makenofollowlogfilematcher(repo, pats, opts):
1889 1896 '''hook for extensions to override the filematcher for non-follow cases'''
1890 1897 return None
1891 1898
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # option name -> (revset template, join operator for list values)
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
        }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    # use the builtin next() for consistency with the call below (and
    # forward compatibility); revs is never empty here
    startrev = next(it)
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise util.Abort(_('cannot follow file not in parent '
                                       'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise util.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

    # We decided to fall back to the slowpath because at least one
    # of the paths was not a file. Check to see if at least one of them
    # existed in history - in that case, we'll continue down the
    # slowpath; otherwise, we can turn off the slowpath
    if slowpath:
        for path in match.files():
            if path == '.' or path in repo.store:
                break
        else:
            slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    # Translate every remaining recognized option into a revset fragment
    # and join them all with 'and'.
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2043 2050
def _logrevs(repo, opts):
    """Resolve the revisions 'log' should consider.

    The default --rev value depends on --follow, while --follow
    behavior itself depends on the revisions resolved from --rev...
    """
    following = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        return scmutil.revrange(repo, opts['rev'])
    if following:
        if repo.dirstate.p1() == nullid:
            # working directory parent is the null revision: nothing
            # to follow
            return revset.baseset()
        return repo.revs('reverse(:.)')
    allrevs = revset.spanset(repo)
    allrevs.reverse()
    return allrevs
2058 2065
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        revs.sort(reverse=True)
    if expr:
        # Revset matchers often operate faster on revisions in changelog
        # order, because most filters deal with the changelog.
        revs.reverse()
        matcher = revset.match(repo.ui, expr)
        # Revset matches can reorder revisions. "A or B" typically
        # returns the revision matching A then the revision matching B. Sort
        # again to fix that.
        revs = matcher(repo, revs)
        revs.sort(reverse=True)
    if limit is not None:
        # Apply --limit by truncating the (already sorted) result.
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = revset.baseset(limitedrevs)

    return revs, expr, filematcher
2095 2102
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        # Revset matchers often operate faster on revisions in changelog
        # order, because most filters deal with the changelog.
        if not opts.get('rev'):
            revs.reverse()
        matcher = revset.match(repo.ui, expr)
        # Revset matches can reorder revisions. "A or B" typically
        # returns the revision matching A then the revision matching B. Sort
        # again to fix that; user-specified --rev order is preserved.
        revs = matcher(repo, revs)
        if not opts.get('rev'):
            revs.sort(reverse=True)
    if limit is not None:
        # Apply --limit by truncating the result.
        limitedrevs = []
        for idx, r in enumerate(revs):
            if limit <= idx:
                break
            limitedrevs.append(r)
        revs = revset.baseset(limitedrevs)

    return revs, expr, filematcher
2130 2137
def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
                 filematcher=None):
    """Render *dag* as an ASCII graph through *displayer*.

    showparents lists the nodes drawn with '@' (working directory
    parents). edgefn computes the graph edges for each row.
    getrenamed, when given, is used to collect copy information per
    revision; filematcher, when given, restricts the files detailed
    for each revision.
    """
    seen, state = [], graphmod.asciistate()
    for rev, type, ctx, parents in dag:
        # Pick the node glyph: '@' working parent, 'x' obsolete,
        # '_' closes a branch, 'o' otherwise.
        char = 'o'
        if ctx.node() in showparents:
            char = '@'
        elif ctx.obsolete():
            char = 'x'
        elif ctx.closesbranch():
            char = '_'
        copies = None
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        # The displayer buffered its output per revision; retrieve it
        # and drop a trailing empty line before drawing the graph rows.
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        edges = edgefn(type, char, lines, seen, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()
2161 2168
def graphlog(ui, repo, *pats, **opts):
    """Graph-style 'hg log'; parameters are identical to the log
    command's."""
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    dag = graphmod.dagwalker(repo, revs)

    renamedfn = None
    if opts.get('copies'):
        # Bound rename detection by the requested revision range, if any.
        stoprev = None
        if opts.get('rev'):
            stoprev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        renamedfn = templatekw.getrenamedfn(repo, endrev=stoprev)
    displayer = show_changeset(ui, repo, opts, buffered=True)
    wdirparents = [pctx.node() for pctx in repo[None].parents()]
    displaygraph(ui, dag, displayer, wdirparents, graphmod.asciiedges,
                 renamedfn, filematcher)
2177 2184
def checkunsupportedgraphflags(pats, opts):
    """Abort when an option incompatible with -G/--graph is enabled."""
    unsupported = ["newest_first"]
    for name in unsupported:
        if opts.get(name):
            raise util.Abort(_("-G/--graph option is incompatible with --%s")
                             % name.replace("_", "-"))
2183 2190
def graphrevs(repo, nodes, opts):
    """Return a graphmod walker over *nodes*, newest first.

    *nodes* is reversed in place; when --limit is set, only that many
    entries are kept.
    """
    maxcount = loglimit(opts)
    nodes.reverse()
    if maxcount is None:
        return graphmod.nodes(repo, nodes)
    return graphmod.nodes(repo, nodes[:maxcount])
2190 2197
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule files matching *match* for addition, recursing into
    subrepos.

    prefix is prepended to paths in messages (used when called from a
    subrepo). When explicitonly is true, only exactly-named files are
    added. Returns the list of paths that could not be added.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []

    # record files rejected by the matcher, then delegate to its own
    # bad-file handling
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
                                  True, False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                # check for filename case collisions (e.g. on Windows)
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, match)
            if opts.get('subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
2233 2240
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking files matching *match* without deleting them,
    recursing into subrepos.

    prefix is prepended to paths in messages (used when called from a
    subrepo). When explicitonly is true, only exactly-named files are
    forgotten. Returns (bad, forgot): paths that could not be handled
    and paths successfully forgotten.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # record files rejected by the matcher, then delegate to its own
    # bad-file handling
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    # candidates: modified, added, deleted and clean files from status
    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s[0] + s[1] + s[3] + s[6])
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # warn about explicitly named files that are not tracked
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2281 2288
def files(ui, ctx, m, fm, fmt, subrepos):
    """List the files of *ctx* matching *m* through formatter *fm*.

    fmt is the format string applied to each path. When subrepos is
    true, all subrepositories are visited; otherwise only those a
    pattern explicitly selects. Returns 0 when at least one file was
    listed, 1 otherwise.
    """
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    # Hoisted out of the loop below: the original redefined this helper
    # on every iteration.
    def matchessubrepo(subpath):
        # a subrepo is selected when everything matches, when it is
        # named exactly, or when a pattern points inside it
        return (m.always() or m.exact(subpath)
                or any(f.startswith(subpath + '/') for f in m.files()))

    for f in ctx.matches(m):
        # in the working directory, skip files marked for removal
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        if subrepos or matchessubrepo(subpath):
            sub = ctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret
2314 2321
def remove(ui, repo, m, prefix, after, force, subrepos):
    """Implement the file-removal logic shared by 'hg remove' and
    subrepos.

    m selects the files; prefix is prepended to paths in messages when
    called from a subrepo; after/force mirror the --after/--force
    flags; subrepos enables recursion. Returns 0 on success, 1 when
    some file could not be removed.
    """
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    # Helpers hoisted out of the loops below: the original redefined
    # them on every iteration.
    def matchessubrepo(matcher, subpath):
        # a subrepo is selected when named exactly or when a pattern
        # points inside it
        if matcher.exact(subpath):
            return True
        for f in matcher.files():
            if f.startswith(subpath):
                return True
        return False

    def insubrepo(f):
        # True when f lies under some subrepo of the working context
        for subpath in wctx.substate:
            if f.startswith(subpath):
                return True
        return False

    for subpath in sorted(wctx.substate):
        if subrepos or matchessubrepo(m, subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                if sub.removefiles(submatch, prefix, after, force, subrepos):
                    ret = 1
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % join(subpath))

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    for f in m.files():
        isdir = f in deleteddirs or wctx.hasdir(f)
        if f in repo.dirstate or isdir or f == '.' or insubrepo(f):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                ui.warn(_('not removing %s: no tracked files\n')
                        % m.rel(f))
            else:
                ui.warn(_('not removing %s: file is untracked\n')
                        % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1

    # decide what actually gets removed; 'removelist' avoids shadowing
    # the builtin 'list' as the original code did
    if force:
        removelist = modified + deleted + clean + added
    elif after:
        removelist = deleted
        for f in modified + added + clean:
            ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
            ret = 1
    else:
        removelist = deleted + clean
        for f in modified:
            ui.warn(_('not removing %s: file is modified (use -f'
                      ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            ui.warn(_('not removing %s: file has been marked for add'
                      ' (use forget to undo)\n') % m.rel(f))
            ret = 1

    for f in sorted(removelist):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    wlock = repo.wlock()
    try:
        if not after:
            for f in removelist:
                if f in added:
                    continue # we never unlink added files on remove
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        repo[None].forget(removelist)
    finally:
        wlock.release()

    return ret
2399 2406
def cat(ui, repo, ctx, matcher, prefix, **opts):
    """Write out the contents of files in *ctx* selected by *matcher*,
    recursing into subrepos.

    prefix is prepended to output pathnames (used when called from a
    subrepo). Returns 0 when at least one file was written, 1
    otherwise.
    """
    err = 1

    def write(path):
        # opts['output'] may redirect to a file template instead of stdout
        fp = makefileobj(repo, opts.get('output'), ctx.node(),
                         pathname=os.path.join(prefix, path))
        data = ctx[path].data()
        if opts.get('decode'):
            data = repo.wwritedata(path, data)
        fp.write(data)
        fp.close()

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mf = repo.manifest
        mfnode = ctx.manifestnode()
        if mfnode and mf.find(mfnode, file)[0]:
            write(file)
            return 0

    # Don't warn about "missing" files that are really in subrepos
    def badfn(path, msg):
        for subpath in ctx.substate:
            if path.startswith(subpath):
                return
        matcher.bad(path, msg)

    for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, matcher)

            if not sub.cat(submatch, os.path.join(prefix, sub._path),
                           **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2446 2453
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get('date')
    if rawdate:
        opts['date'] = util.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        addremoveret = scmutil.addremove(repo, matcher, "", opts)
        if addremoveret != 0:
            raise util.Abort(
                _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)
2463 2470
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    """Rewrite changeset *old* to also contain the current working
    directory changes (the implementation of 'commit --amend').

    Returns the node of the amended changeset (or old's node when
    nothing changed).
    """
    # avoid cycle context -> subrepo -> cmdutil
    import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()
    createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)

    wlock = dsguard = lock = newid = None
    try:
        wlock = repo.wlock()
        dsguard = dirstateguard(repo, 'amend')
        lock = repo.lock()
        tr = repo.transaction('amend')
        try:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._activebookmark
            try:
                repo._activebookmark = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._activebookmark = activebookmark
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            #          |   from working dir to go into amending commit
            #          |   (or a workingctx if there were no changes)
            #          |
            # old      o - changeset to amend
            #          |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                # Only consult the second parent when it actually exists:
                # 'old.p2' (without the call) is a bound method and is
                # always truthy, which made this branch run even for
                # non-merge amends.
                if old.p2():
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())

                def samefile(f):
                    if f in ctx.manifest():
                        a = ctx.filectx(f)
                        if f in base.manifest():
                            b = base.filectx(f)
                            return (not a.cmp(b)
                                    and a.flags() == b.flags())
                        else:
                            return False
                    else:
                        return f not in base.manifest()
                files = [f for f in files if not samefile(f)]

                def filectxfn(repo, ctx_, path):
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

            user = opts.get('user') or old.user()
            date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This not what we expect from amend.
                return old.node()

            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        ui.debug('moving bookmarks %r from %s to %s\n' %
                                 (marks, old.hex(), hex(newid)))
                        marks[bm] = newid
                    marks.recordchange(tr)
            #commit the whole amend process
            if createmarkers:
                # mark the new changeset as successor of the rewritten one
                new = repo[newid]
                obs = [(old, (new,))]
                if node:
                    obs.append((ctx, ()))

                obsolete.createmarkers(repo, obs)
            tr.close()
        finally:
            tr.release()
            dsguard.close()
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        lockmod.release(lock, dsguard, wlock)
    return newid
2663 2670
def commiteditor(repo, ctx, subs, editform=''):
    """Return the commit message for ctx, launching an editor if empty."""
    description = ctx.description()
    if not description:
        # no message recorded on the context yet: ask the user for one
        return commitforceeditor(repo, ctx, subs, editform=editform)
    return description
2668 2675
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform=''):
    """Run an editor to (re)collect the commit message for ctx.

    The editor text is rendered from the most specific matching
    ``[committemplate]`` configuration (walking ``editform`` components
    from most to least specific), falling back to the built-in layout.

    'finishdesc' is an optional callable post-processing the edited text;
    'extramsg' is an extra instruction line shown to the user. Raises
    util.Abort if the resulting message is empty.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # look for the most specific committemplate config, e.g. for
    # editform 'commit.amend' try 'changeset.commit.amend', then
    # 'changeset.commit', then 'changeset'
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = os.getcwd()
    os.chdir(repo.root)
    try:
        text = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                            editform=editform)
    finally:
        # restore the previous working directory even if the editor
        # (or hooks around it) raise, so callers are not left in repo.root
        os.chdir(olddir)
    # strip the "HG:" helper lines from the edited result
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise util.Abort(_("empty commit message"))

    return text
2698 2705
def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    """Render the commit editor text for ctx through template 'tmpl'.

    'extramsg' is an extra instruction string made available to the
    template as the 'extramsg' keyword. Returns the rendered text.
    """
    ui = repo.ui
    # resolve 'tmpl' into a template string and/or a style map file
    tmpl, mapfile = gettemplate(ui, tmpl, None)

    try:
        t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
    except SyntaxError as inst:
        raise util.Abort(inst.args[0])

    # expose the other [committemplate] keys (except 'changeset' itself)
    # as named template snippets usable from the main template
    for k, v in repo.ui.configitems('committemplate'):
        if k != 'changeset':
            t.t.cache[k] = v

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    # render into a ui buffer so nothing leaks to the terminal
    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2718 2725
def hgprefix(msg):
    """Prefix each non-empty line of msg with "HG: " and rejoin them."""
    prefixed = []
    for line in msg.split("\n"):
        if line:
            prefixed.append("HG: %s" % line)
    return "\n".join(prefixed)
2721 2728
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the default (non-templated) commit editor text for ctx."""
    lines = []
    add = lines.append
    modified = ctx.modified()
    added = ctx.added()
    removed = ctx.removed()
    if ctx.description():
        add(ctx.description())
    add("")
    add("") # Empty line between message and comments.
    add(hgprefix(_("Enter commit message."
                   " Lines beginning with 'HG:' are removed.")))
    add(hgprefix(extramsg))
    add("HG: --")
    add(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        add(hgprefix(_("branch merge")))
    if ctx.branch():
        add(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        add(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    for s in subs:
        add(hgprefix(_("subrepo %s") % s))
    for f in added:
        add(hgprefix(_("added %s") % f))
    for f in modified:
        add(hgprefix(_("changed %s") % f))
    for f in removed:
        add(hgprefix(_("removed %s") % f))
    if not (added or modified or removed):
        add(hgprefix(_("no files changed")))
    add("")

    return "\n".join(lines)
2749 2756
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Print status messages after committing 'node' on 'branch'.

    'bheads' is a collection of branch head nodes (as they were before
    the commit) used to decide whether a 'created new head' warning is
    due. Also reports reopened closed branch heads and, in debug or
    verbose mode, echoes the committed changeset id.
    """
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                # committing on top of a closed head implicitly reopens it
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2797 2804
def revert(ui, repo, ctx, parents, *pats, **opts):
    """Revert files matched by pats/opts to their state in changeset ctx.

    'parents' is the (parent, p2) node pair of the working directory.
    The function classifies every matched file into one of the revert
    actions (revert/add/remove/drop/forget/undelete/noop/unknown),
    decides whether a backup is needed, prints per-file messages, and
    delegates the actual filesystem/dirstate work to _performrevert().
    Matching subrepositories are reverted recursively at the end.
    """
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        parent = p2
    if node == parent:
        pmf = mf
    else:
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    wlock = repo.wlock()
    try:
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                # suppress warnings for paths already collected, for
                # subrepos, and for directories covering collected files
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find status of all file in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # split between files known in target manifest and the others
        smf = set(mf)

        # determine the exact nature of the deleted changesets
        deladded = _deleted - smf
        deleted = _deleted - deladded

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinct between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may needs backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            if pmf is None:
                # only need parent manifest in the merge case,
                # so do not read by default
                pmf = repo[parent].manifest()
            mergeadd = dsmodified - set(pmf)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # distinguish between file to forget and the other
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present at
        # the same path. If a such file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of file>, message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                   }

        # "constant" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set do avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backup = 2 # unconditionally do backup
        check = 1 # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backup = check = discard

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets that results that will change file on disk
            # Modified compared to target, no local change
            (modified, actions['revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions['revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions['revert'], backup),
            # Added since target
            (added, actions['remove'], discard),
            # Added in working directory
            (dsadded, actions['forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions['drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions['add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions['add'], check),
            # Removed since target, marked as such in working copy parent
            (dsremoved, actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions['undelete'], check),
            ## the following sets does not result in any file changes
            # File with no modification
            (clean, actions['noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions['unknown'], discard),
            )

        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touch on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup and (backup <= dobackup
                                     or wctx[abs].cmp(ctx[abs])):
                        bakname = "%s.orig" % rel
                        ui.note(_('saving current version of %s as %s\n') %
                                (rel, bakname))
                        if not opts.get('dry_run'):
                            if interactive:
                                util.copyfile(target, bakname)
                            else:
                                util.rename(target, bakname)
                    if ui.verbose or not exact:
                        if not isinstance(msg, basestring):
                            msg = msg(abs)
                        ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
            _performrevert(repo, parents, ctx, actions, interactive)

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
                except KeyError:
                    raise util.Abort("subrepository '%s' does not exist in %s!"
                                     % (sub, short(ctx.node())))
    finally:
        wlock.release()
3077 3084
def _revertprefetch(repo, ctx, *files):
    """Let extension changing the storage layer prefetch content"""
    # intentionally a no-op here: extensions that store file content
    # elsewhere may wrap this to fetch 'files' in bulk before
    # _performrevert starts reading them one by one
    pass
3081 3088
def _performrevert(repo, parents, ctx, actions, interactive=False):
    """function that actually perform all the actions computed for revert

    This is an independent function to let extension to plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.
    """
    parent, p2 = parents
    node = ctx.node()
    def checkout(f):
        # write the target revision's version of f into the working copy
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    audit_path = pathutil.pathauditor(repo.root)
    # 'forget': stop tracking, keep the file on disk
    for f in actions['forget'][0]:
        repo.dirstate.drop(f)
    # 'remove': delete from disk (best effort) and mark removed
    for f in actions['remove'][0]:
        audit_path(f)
        try:
            util.unlinkpath(repo.wjoin(f))
        except OSError:
            pass
        repo.dirstate.remove(f)
    # 'drop': mark removed without touching the (missing/backed-up) file
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, {})
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        reversehunks = repo.ui.configbool('experimental',
                                          'revertalternateinteractivemode',
                                          True)
        # the diff direction decides whether selected hunks must be
        # reversed before being applied to the working copy
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks = recordfilter(repo.ui, originalchunks)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except patch.PatchError as err:
            raise util.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        # Apply changes
        fp = cStringIO.StringIO()
        for c in chunks:
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except patch.PatchError as err:
                raise util.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    # restore copy/rename metadata for files we just (re)materialized
    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3183 3190
def command(table):
    """Returns a function object to be used as a decorator for making commands.

    This function receives a command table as its argument. The table should
    be a dict.

    The returned function can be used as a decorator for adding commands
    to that command table. This function accepts multiple arguments to define
    a command.

    The first argument is the command name.

    The options argument is an iterable of tuples defining command arguments.
    See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.

    The synopsis argument defines a short, one line summary of how to use the
    command. This shows up in the help output.

    The norepo argument defines whether the command does not require a
    local repository. Most commands operate against a repository, thus the
    default is False.

    The optionalrepo argument defines whether the command optionally requires
    a local repository.

    The inferrepo argument defines whether to try to find a repository from the
    command line arguments. If True, arguments will be examined for potential
    repository locations. See ``findrepo()``. If a repository is found, it
    will be used.
    """
    def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
            inferrepo=False):
        def decorator(func):
            # register the command, with its synopsis when provided
            if synopsis:
                table[name] = func, list(options), synopsis
            else:
                table[name] = func, list(options)

            if norepo or optionalrepo or inferrepo:
                # Avoid import cycle by importing lazily; compute the
                # alias suffix once instead of once per flag.
                import commands
                aliases = ' %s' % ' '.join(parsealiases(name))
                if norepo:
                    commands.norepo += aliases
                if optionalrepo:
                    commands.optionalrepo += aliases
                if inferrepo:
                    commands.inferrepo += aliases

            return func
        return decorator

    return cmd
3239 3246
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
#  (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
#  - (sourceurl, sourcebranch, sourcepeer, incoming)
#  - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()

# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# Each entry is a tuple:
#  (state file, clearable, allowcommit, error, hint)
# consumed by checkunfinished() and clearunfinished() below.
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg update' to abort")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]
3268 3275
def checkunfinished(repo, commit=False):
    '''Abort when a multistep operation (like graft) was left unfinished.

    With commit=True, states that explicitly allow committing are
    ignored. It's probably good to check this right before
    bailifchanged().
    '''
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        skip = commit and allowcommit
        if not skip and repo.vfs.exists(statefile):
            raise util.Abort(msg, hint=hint)
3279 3286
def clearunfinished(repo):
    '''Abort on any unfinished operation that cannot be cleared, then
    remove the state files of all clearable ones.
    '''
    # first pass: refuse to proceed if a non-clearable state is present
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if not clearable and repo.vfs.exists(statefile):
            raise util.Abort(msg, hint=hint)
    # second pass: wipe every clearable state file that exists
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if clearable and repo.vfs.exists(statefile):
            util.unlink(repo.join(statefile))
3290 3297
class dirstateguard(object):
    '''Restore dirstate at unexpected failure.

    At the construction, this class does:

    - write current ``repo.dirstate`` out, and
    - save ``.hg/dirstate`` into the backup file

    This restores ``.hg/dirstate`` from backup file, if ``release()``
    is invoked before ``close()``.

    This just removes the backup file at ``close()`` before ``release()``.
    '''

    def __init__(self, repo, name):
        # flush in-memory dirstate first so the backup is up to date
        repo.dirstate.write()
        self._repo = repo
        # id(self) makes the backup filename unique per guard instance
        self._filename = 'dirstate.backup.%s.%d' % (name, id(self))
        repo.vfs.write(self._filename, repo.vfs.tryread('dirstate'))
        self._active = True
        self._closed = False

    def __del__(self):
        if self._active: # still active
            # this may occur, even if this class is used correctly:
            # for example, releasing other resources like transaction
            # may raise exception before ``dirstateguard.release`` in
            # ``release(tr, ....)``.
            self._abort()

    def close(self):
        # success path: discard the backup without restoring it
        if not self._active: # already inactivated
            msg = (_("can't close already inactivated backup: %s")
                   % self._filename)
            raise util.Abort(msg)

        self._repo.vfs.unlink(self._filename)
        self._active = False
        self._closed = True

    def _abort(self):
        # this "invalidate()" prevents "wlock.release()" from writing
        # changes of dirstate out after restoring to original status
        self._repo.dirstate.invalidate()

        self._repo.vfs.rename(self._filename, 'dirstate')
        self._active = False

    def release(self):
        # failure path (unless close() ran first): restore from backup
        if not self._closed:
            if not self._active: # already inactivated
                msg = (_("can't release already inactivated backup: %s")
                       % self._filename)
                raise util.Abort(msg)
            self._abort()
3346 3353
# user-facing compression names -> internal compression codes
_bundlecompspecs = {
    'none': None,
    'bzip2': 'BZ',
    'gzip': 'GZ',
}

# user-facing format names -> internal bundle format versions
_bundleversionspecs = {
    'v1': '01',
    'v2': '02',
    'bundle2': '02', #legacy
}

def parsebundletype(repo, spec):
    """return the internal bundle type to use from a user input

    This is parsing user specified bundle type as accepted in:

        'hg bundle --type TYPE'.

    It accept format in the form [compression][-version]|[version]

    Consensus about extensions of the format for various bundle2 feature
    is to prefix any feature with "+". eg "+treemanifest" or "gzip+phases"
    """
    # split the spec into its compression and version halves
    if '-' in spec:
        comp, version = spec.split('-', 1)
    elif spec in _bundlecompspecs:
        comp, version = spec, None
    elif spec in _bundleversionspecs:
        comp, version = None, spec
    else:
        raise util.Abort(_('unknown bundle type specified with --type'))

    # resolve compression, defaulting to bzip2
    if comp is None:
        compression = 'BZ'
    else:
        try:
            compression = _bundlecompspecs[comp]
        except KeyError:
            raise util.Abort(_('unknown bundle type specified with --type'))

    # resolve format version; the default depends on repo requirements
    if version is None:
        if 'generaldelta' in repo.requirements:
            bversion = '02'
        else:
            bversion = '01'
    else:
        try:
            bversion = _bundleversionspecs[version]
        except KeyError:
            raise util.Abort(_('unknown bundle type specified with --type'))

    return bversion, compression
@@ -1,2549 +1,2570 b''
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 import collections
10 10 import cStringIO, email, os, errno, re, posixpath, copy
11 11 import tempfile, zlib, shutil
12 12
13 13 from i18n import _
14 14 from node import hex, short
15 15 import base85, mdiff, scmutil, util, diffhelpers, copies, encoding, error
16 16 import pathutil
17 17
18 18 gitre = re.compile('diff --git a/(.*) b/(.*)')
19 19 tabsplitter = re.compile(r'(\t+|[^\t]+)')
20 20
21 21 class PatchError(Exception):
22 22 pass
23 23
24 24
25 25 # public functions
26 26
27 27 def split(stream):
28 28 '''return an iterator of individual patches from a stream'''
29 29 def isheader(line, inheader):
30 30 if inheader and line[0] in (' ', '\t'):
31 31 # continuation
32 32 return True
33 33 if line[0] in (' ', '-', '+'):
34 34 # diff line - don't check for header pattern in there
35 35 return False
36 36 l = line.split(': ', 1)
37 37 return len(l) == 2 and ' ' not in l[0]
38 38
39 39 def chunk(lines):
40 40 return cStringIO.StringIO(''.join(lines))
41 41
42 42 def hgsplit(stream, cur):
43 43 inheader = True
44 44
45 45 for line in stream:
46 46 if not line.strip():
47 47 inheader = False
48 48 if not inheader and line.startswith('# HG changeset patch'):
49 49 yield chunk(cur)
50 50 cur = []
51 51 inheader = True
52 52
53 53 cur.append(line)
54 54
55 55 if cur:
56 56 yield chunk(cur)
57 57
58 58 def mboxsplit(stream, cur):
59 59 for line in stream:
60 60 if line.startswith('From '):
61 61 for c in split(chunk(cur[1:])):
62 62 yield c
63 63 cur = []
64 64
65 65 cur.append(line)
66 66
67 67 if cur:
68 68 for c in split(chunk(cur[1:])):
69 69 yield c
70 70
71 71 def mimesplit(stream, cur):
72 72 def msgfp(m):
73 73 fp = cStringIO.StringIO()
74 74 g = email.Generator.Generator(fp, mangle_from_=False)
75 75 g.flatten(m)
76 76 fp.seek(0)
77 77 return fp
78 78
79 79 for line in stream:
80 80 cur.append(line)
81 81 c = chunk(cur)
82 82
83 83 m = email.Parser.Parser().parse(c)
84 84 if not m.is_multipart():
85 85 yield msgfp(m)
86 86 else:
87 87 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
88 88 for part in m.walk():
89 89 ct = part.get_content_type()
90 90 if ct not in ok_types:
91 91 continue
92 92 yield msgfp(part)
93 93
94 94 def headersplit(stream, cur):
95 95 inheader = False
96 96
97 97 for line in stream:
98 98 if not inheader and isheader(line, inheader):
99 99 yield chunk(cur)
100 100 cur = []
101 101 inheader = True
102 102 if inheader and not isheader(line, inheader):
103 103 inheader = False
104 104
105 105 cur.append(line)
106 106
107 107 if cur:
108 108 yield chunk(cur)
109 109
110 110 def remainder(cur):
111 111 yield chunk(cur)
112 112
113 113 class fiter(object):
114 114 def __init__(self, fp):
115 115 self.fp = fp
116 116
117 117 def __iter__(self):
118 118 return self
119 119
120 120 def next(self):
121 121 l = self.fp.readline()
122 122 if not l:
123 123 raise StopIteration
124 124 return l
125 125
126 126 inheader = False
127 127 cur = []
128 128
129 129 mimeheaders = ['content-type']
130 130
131 131 if not util.safehasattr(stream, 'next'):
132 132 # http responses, for example, have readline but not next
133 133 stream = fiter(stream)
134 134
135 135 for line in stream:
136 136 cur.append(line)
137 137 if line.startswith('# HG changeset patch'):
138 138 return hgsplit(stream, cur)
139 139 elif line.startswith('From '):
140 140 return mboxsplit(stream, cur)
141 141 elif isheader(line, inheader):
142 142 inheader = True
143 143 if line.split(':', 1)[0].lower() in mimeheaders:
144 144 # let email parser handle this
145 145 return mimesplit(stream, cur)
146 146 elif line.startswith('--- ') and inheader:
147 147 # No evil headers seen by diff start, split by hand
148 148 return headersplit(stream, cur)
149 149 # Not enough info, keep reading
150 150
151 151 # if we are here, we have a very plain patch
152 152 return remainder(cur)
153 153
154 154 def extract(ui, fileobj):
155 155 '''extract patch from data read from fileobj.
156 156
157 157 patch can be a normal patch or contained in an email message.
158 158
159 return tuple (filename, message, user, date, branch, node, p1, p2).
160 Any item in the returned tuple can be None. If filename is None,
159 return a dictionnary. Standard keys are:
160 - filename,
161 - message,
162 - user,
163 - date,
164 - branch,
165 - node,
166 - p1,
167 - p2.
168 Any item can be missing from the dictionary. If filename is mising,
161 169 fileobj did not contain a patch. Caller must unlink filename when done.'''
162 170
163 171 # attempt to detect the start of a patch
164 172 # (this heuristic is borrowed from quilt)
165 173 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
166 174 r'retrieving revision [0-9]+(\.[0-9]+)*$|'
167 175 r'---[ \t].*?^\+\+\+[ \t]|'
168 176 r'\*\*\*[ \t].*?^---[ \t])', re.MULTILINE|re.DOTALL)
169 177
178 data = {}
170 179 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
171 180 tmpfp = os.fdopen(fd, 'w')
172 181 try:
173 182 msg = email.Parser.Parser().parse(fileobj)
174 183
175 184 subject = msg['Subject']
176 185 user = msg['From']
177 186 if not subject and not user:
178 187 # Not an email, restore parsed headers if any
179 188 subject = '\n'.join(': '.join(h) for h in msg.items()) + '\n'
180 189
181 190 # should try to parse msg['Date']
182 191 date = None
183 192 nodeid = None
184 193 branch = None
185 194 parents = []
186 195
187 196 if subject:
188 197 if subject.startswith('[PATCH'):
189 198 pend = subject.find(']')
190 199 if pend >= 0:
191 200 subject = subject[pend + 1:].lstrip()
192 201 subject = re.sub(r'\n[ \t]+', ' ', subject)
193 202 ui.debug('Subject: %s\n' % subject)
194 203 if user:
195 204 ui.debug('From: %s\n' % user)
196 205 diffs_seen = 0
197 206 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
198 207 message = ''
199 208 for part in msg.walk():
200 209 content_type = part.get_content_type()
201 210 ui.debug('Content-Type: %s\n' % content_type)
202 211 if content_type not in ok_types:
203 212 continue
204 213 payload = part.get_payload(decode=True)
205 214 m = diffre.search(payload)
206 215 if m:
207 216 hgpatch = False
208 217 hgpatchheader = False
209 218 ignoretext = False
210 219
211 220 ui.debug('found patch at byte %d\n' % m.start(0))
212 221 diffs_seen += 1
213 222 cfp = cStringIO.StringIO()
214 223 for line in payload[:m.start(0)].splitlines():
215 224 if line.startswith('# HG changeset patch') and not hgpatch:
216 225 ui.debug('patch generated by hg export\n')
217 226 hgpatch = True
218 227 hgpatchheader = True
219 228 # drop earlier commit message content
220 229 cfp.seek(0)
221 230 cfp.truncate()
222 231 subject = None
223 232 elif hgpatchheader:
224 233 if line.startswith('# User '):
225 234 user = line[7:]
226 235 ui.debug('From: %s\n' % user)
227 236 elif line.startswith("# Date "):
228 237 date = line[7:]
229 238 elif line.startswith("# Branch "):
230 239 branch = line[9:]
231 240 elif line.startswith("# Node ID "):
232 241 nodeid = line[10:]
233 242 elif line.startswith("# Parent "):
234 243 parents.append(line[9:].lstrip())
235 244 elif not line.startswith("# "):
236 245 hgpatchheader = False
237 246 elif line == '---':
238 247 ignoretext = True
239 248 if not hgpatchheader and not ignoretext:
240 249 cfp.write(line)
241 250 cfp.write('\n')
242 251 message = cfp.getvalue()
243 252 if tmpfp:
244 253 tmpfp.write(payload)
245 254 if not payload.endswith('\n'):
246 255 tmpfp.write('\n')
247 256 elif not diffs_seen and message and content_type == 'text/plain':
248 257 message += '\n' + payload
249 258 except: # re-raises
250 259 tmpfp.close()
251 260 os.unlink(tmpname)
252 261 raise
253 262
254 263 if subject and not message.startswith(subject):
255 264 message = '%s\n%s' % (subject, message)
256 265 tmpfp.close()
257 266 if not diffs_seen:
258 267 os.unlink(tmpname)
259 return None, message, user, date, branch, None, None, None
268 data['message'] = message
269 data['user'] = user
270 data['date'] = date
271 data['branch'] = branch
272 return data
260 273
261 274 if parents:
262 275 p1 = parents.pop(0)
263 276 else:
264 277 p1 = None
265 278
266 279 if parents:
267 280 p2 = parents.pop(0)
268 281 else:
269 282 p2 = None
270 283
271 return tmpname, message, user, date, branch, nodeid, p1, p2
284 data['filename'] = tmpname
285 data['message'] = message
286 data['user'] = user
287 data['date'] = date
288 data['branch'] = branch
289 data['nodeid'] = nodeid
290 data['p1'] = p1
291 data['p2'] = p2
292 return data
272 293
273 294 class patchmeta(object):
274 295 """Patched file metadata
275 296
276 297 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
277 298 or COPY. 'path' is patched file path. 'oldpath' is set to the
278 299 origin file when 'op' is either COPY or RENAME, None otherwise. If
279 300 file mode is changed, 'mode' is a tuple (islink, isexec) where
280 301 'islink' is True if the file is a symlink and 'isexec' is True if
281 302 the file is executable. Otherwise, 'mode' is None.
282 303 """
283 304 def __init__(self, path):
284 305 self.path = path
285 306 self.oldpath = None
286 307 self.mode = None
287 308 self.op = 'MODIFY'
288 309 self.binary = False
289 310
290 311 def setmode(self, mode):
291 312 islink = mode & 0o20000
292 313 isexec = mode & 0o100
293 314 self.mode = (islink, isexec)
294 315
295 316 def copy(self):
296 317 other = patchmeta(self.path)
297 318 other.oldpath = self.oldpath
298 319 other.mode = self.mode
299 320 other.op = self.op
300 321 other.binary = self.binary
301 322 return other
302 323
303 324 def _ispatchinga(self, afile):
304 325 if afile == '/dev/null':
305 326 return self.op == 'ADD'
306 327 return afile == 'a/' + (self.oldpath or self.path)
307 328
308 329 def _ispatchingb(self, bfile):
309 330 if bfile == '/dev/null':
310 331 return self.op == 'DELETE'
311 332 return bfile == 'b/' + self.path
312 333
313 334 def ispatching(self, afile, bfile):
314 335 return self._ispatchinga(afile) and self._ispatchingb(bfile)
315 336
316 337 def __repr__(self):
317 338 return "<patchmeta %s %r>" % (self.op, self.path)
318 339
319 340 def readgitpatch(lr):
320 341 """extract git-style metadata about patches from <patchname>"""
321 342
322 343 # Filter patch for git information
323 344 gp = None
324 345 gitpatches = []
325 346 for line in lr:
326 347 line = line.rstrip(' \r\n')
327 348 if line.startswith('diff --git a/'):
328 349 m = gitre.match(line)
329 350 if m:
330 351 if gp:
331 352 gitpatches.append(gp)
332 353 dst = m.group(2)
333 354 gp = patchmeta(dst)
334 355 elif gp:
335 356 if line.startswith('--- '):
336 357 gitpatches.append(gp)
337 358 gp = None
338 359 continue
339 360 if line.startswith('rename from '):
340 361 gp.op = 'RENAME'
341 362 gp.oldpath = line[12:]
342 363 elif line.startswith('rename to '):
343 364 gp.path = line[10:]
344 365 elif line.startswith('copy from '):
345 366 gp.op = 'COPY'
346 367 gp.oldpath = line[10:]
347 368 elif line.startswith('copy to '):
348 369 gp.path = line[8:]
349 370 elif line.startswith('deleted file'):
350 371 gp.op = 'DELETE'
351 372 elif line.startswith('new file mode '):
352 373 gp.op = 'ADD'
353 374 gp.setmode(int(line[-6:], 8))
354 375 elif line.startswith('new mode '):
355 376 gp.setmode(int(line[-6:], 8))
356 377 elif line.startswith('GIT binary patch'):
357 378 gp.binary = True
358 379 if gp:
359 380 gitpatches.append(gp)
360 381
361 382 return gitpatches
362 383
363 384 class linereader(object):
364 385 # simple class to allow pushing lines back into the input stream
365 386 def __init__(self, fp):
366 387 self.fp = fp
367 388 self.buf = []
368 389
369 390 def push(self, line):
370 391 if line is not None:
371 392 self.buf.append(line)
372 393
373 394 def readline(self):
374 395 if self.buf:
375 396 l = self.buf[0]
376 397 del self.buf[0]
377 398 return l
378 399 return self.fp.readline()
379 400
380 401 def __iter__(self):
381 402 while True:
382 403 l = self.readline()
383 404 if not l:
384 405 break
385 406 yield l
386 407
387 408 class abstractbackend(object):
388 409 def __init__(self, ui):
389 410 self.ui = ui
390 411
391 412 def getfile(self, fname):
392 413 """Return target file data and flags as a (data, (islink,
393 414 isexec)) tuple. Data is None if file is missing/deleted.
394 415 """
395 416 raise NotImplementedError
396 417
397 418 def setfile(self, fname, data, mode, copysource):
398 419 """Write data to target file fname and set its mode. mode is a
399 420 (islink, isexec) tuple. If data is None, the file content should
400 421 be left unchanged. If the file is modified after being copied,
401 422 copysource is set to the original file name.
402 423 """
403 424 raise NotImplementedError
404 425
405 426 def unlink(self, fname):
406 427 """Unlink target file."""
407 428 raise NotImplementedError
408 429
409 430 def writerej(self, fname, failed, total, lines):
410 431 """Write rejected lines for fname. total is the number of hunks
411 432 which failed to apply and total the total number of hunks for this
412 433 files.
413 434 """
414 435 pass
415 436
416 437 def exists(self, fname):
417 438 raise NotImplementedError
418 439
419 440 class fsbackend(abstractbackend):
420 441 def __init__(self, ui, basedir):
421 442 super(fsbackend, self).__init__(ui)
422 443 self.opener = scmutil.opener(basedir)
423 444
424 445 def _join(self, f):
425 446 return os.path.join(self.opener.base, f)
426 447
427 448 def getfile(self, fname):
428 449 if self.opener.islink(fname):
429 450 return (self.opener.readlink(fname), (True, False))
430 451
431 452 isexec = False
432 453 try:
433 454 isexec = self.opener.lstat(fname).st_mode & 0o100 != 0
434 455 except OSError as e:
435 456 if e.errno != errno.ENOENT:
436 457 raise
437 458 try:
438 459 return (self.opener.read(fname), (False, isexec))
439 460 except IOError as e:
440 461 if e.errno != errno.ENOENT:
441 462 raise
442 463 return None, None
443 464
444 465 def setfile(self, fname, data, mode, copysource):
445 466 islink, isexec = mode
446 467 if data is None:
447 468 self.opener.setflags(fname, islink, isexec)
448 469 return
449 470 if islink:
450 471 self.opener.symlink(data, fname)
451 472 else:
452 473 self.opener.write(fname, data)
453 474 if isexec:
454 475 self.opener.setflags(fname, False, True)
455 476
456 477 def unlink(self, fname):
457 478 self.opener.unlinkpath(fname, ignoremissing=True)
458 479
459 480 def writerej(self, fname, failed, total, lines):
460 481 fname = fname + ".rej"
461 482 self.ui.warn(
462 483 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
463 484 (failed, total, fname))
464 485 fp = self.opener(fname, 'w')
465 486 fp.writelines(lines)
466 487 fp.close()
467 488
468 489 def exists(self, fname):
469 490 return self.opener.lexists(fname)
470 491
471 492 class workingbackend(fsbackend):
472 493 def __init__(self, ui, repo, similarity):
473 494 super(workingbackend, self).__init__(ui, repo.root)
474 495 self.repo = repo
475 496 self.similarity = similarity
476 497 self.removed = set()
477 498 self.changed = set()
478 499 self.copied = []
479 500
480 501 def _checkknown(self, fname):
481 502 if self.repo.dirstate[fname] == '?' and self.exists(fname):
482 503 raise PatchError(_('cannot patch %s: file is not tracked') % fname)
483 504
484 505 def setfile(self, fname, data, mode, copysource):
485 506 self._checkknown(fname)
486 507 super(workingbackend, self).setfile(fname, data, mode, copysource)
487 508 if copysource is not None:
488 509 self.copied.append((copysource, fname))
489 510 self.changed.add(fname)
490 511
491 512 def unlink(self, fname):
492 513 self._checkknown(fname)
493 514 super(workingbackend, self).unlink(fname)
494 515 self.removed.add(fname)
495 516 self.changed.add(fname)
496 517
497 518 def close(self):
498 519 wctx = self.repo[None]
499 520 changed = set(self.changed)
500 521 for src, dst in self.copied:
501 522 scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
502 523 if self.removed:
503 524 wctx.forget(sorted(self.removed))
504 525 for f in self.removed:
505 526 if f not in self.repo.dirstate:
506 527 # File was deleted and no longer belongs to the
507 528 # dirstate, it was probably marked added then
508 529 # deleted, and should not be considered by
509 530 # marktouched().
510 531 changed.discard(f)
511 532 if changed:
512 533 scmutil.marktouched(self.repo, changed, self.similarity)
513 534 return sorted(self.changed)
514 535
515 536 class filestore(object):
516 537 def __init__(self, maxsize=None):
517 538 self.opener = None
518 539 self.files = {}
519 540 self.created = 0
520 541 self.maxsize = maxsize
521 542 if self.maxsize is None:
522 543 self.maxsize = 4*(2**20)
523 544 self.size = 0
524 545 self.data = {}
525 546
526 547 def setfile(self, fname, data, mode, copied=None):
527 548 if self.maxsize < 0 or (len(data) + self.size) <= self.maxsize:
528 549 self.data[fname] = (data, mode, copied)
529 550 self.size += len(data)
530 551 else:
531 552 if self.opener is None:
532 553 root = tempfile.mkdtemp(prefix='hg-patch-')
533 554 self.opener = scmutil.opener(root)
534 555 # Avoid filename issues with these simple names
535 556 fn = str(self.created)
536 557 self.opener.write(fn, data)
537 558 self.created += 1
538 559 self.files[fname] = (fn, mode, copied)
539 560
540 561 def getfile(self, fname):
541 562 if fname in self.data:
542 563 return self.data[fname]
543 564 if not self.opener or fname not in self.files:
544 565 return None, None, None
545 566 fn, mode, copied = self.files[fname]
546 567 return self.opener.read(fn), mode, copied
547 568
548 569 def close(self):
549 570 if self.opener:
550 571 shutil.rmtree(self.opener.base)
551 572
552 573 class repobackend(abstractbackend):
553 574 def __init__(self, ui, repo, ctx, store):
554 575 super(repobackend, self).__init__(ui)
555 576 self.repo = repo
556 577 self.ctx = ctx
557 578 self.store = store
558 579 self.changed = set()
559 580 self.removed = set()
560 581 self.copied = {}
561 582
562 583 def _checkknown(self, fname):
563 584 if fname not in self.ctx:
564 585 raise PatchError(_('cannot patch %s: file is not tracked') % fname)
565 586
566 587 def getfile(self, fname):
567 588 try:
568 589 fctx = self.ctx[fname]
569 590 except error.LookupError:
570 591 return None, None
571 592 flags = fctx.flags()
572 593 return fctx.data(), ('l' in flags, 'x' in flags)
573 594
574 595 def setfile(self, fname, data, mode, copysource):
575 596 if copysource:
576 597 self._checkknown(copysource)
577 598 if data is None:
578 599 data = self.ctx[fname].data()
579 600 self.store.setfile(fname, data, mode, copysource)
580 601 self.changed.add(fname)
581 602 if copysource:
582 603 self.copied[fname] = copysource
583 604
584 605 def unlink(self, fname):
585 606 self._checkknown(fname)
586 607 self.removed.add(fname)
587 608
588 609 def exists(self, fname):
589 610 return fname in self.ctx
590 611
591 612 def close(self):
592 613 return self.changed | self.removed
593 614
594 615 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
595 616 unidesc = re.compile('@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
596 617 contextdesc = re.compile('(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
597 618 eolmodes = ['strict', 'crlf', 'lf', 'auto']
598 619
599 620 class patchfile(object):
600 621 def __init__(self, ui, gp, backend, store, eolmode='strict'):
601 622 self.fname = gp.path
602 623 self.eolmode = eolmode
603 624 self.eol = None
604 625 self.backend = backend
605 626 self.ui = ui
606 627 self.lines = []
607 628 self.exists = False
608 629 self.missing = True
609 630 self.mode = gp.mode
610 631 self.copysource = gp.oldpath
611 632 self.create = gp.op in ('ADD', 'COPY', 'RENAME')
612 633 self.remove = gp.op == 'DELETE'
613 634 if self.copysource is None:
614 635 data, mode = backend.getfile(self.fname)
615 636 else:
616 637 data, mode = store.getfile(self.copysource)[:2]
617 638 if data is not None:
618 639 self.exists = self.copysource is None or backend.exists(self.fname)
619 640 self.missing = False
620 641 if data:
621 642 self.lines = mdiff.splitnewlines(data)
622 643 if self.mode is None:
623 644 self.mode = mode
624 645 if self.lines:
625 646 # Normalize line endings
626 647 if self.lines[0].endswith('\r\n'):
627 648 self.eol = '\r\n'
628 649 elif self.lines[0].endswith('\n'):
629 650 self.eol = '\n'
630 651 if eolmode != 'strict':
631 652 nlines = []
632 653 for l in self.lines:
633 654 if l.endswith('\r\n'):
634 655 l = l[:-2] + '\n'
635 656 nlines.append(l)
636 657 self.lines = nlines
637 658 else:
638 659 if self.create:
639 660 self.missing = False
640 661 if self.mode is None:
641 662 self.mode = (False, False)
642 663 if self.missing:
643 664 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
644 665
645 666 self.hash = {}
646 667 self.dirty = 0
647 668 self.offset = 0
648 669 self.skew = 0
649 670 self.rej = []
650 671 self.fileprinted = False
651 672 self.printfile(False)
652 673 self.hunks = 0
653 674
654 675 def writelines(self, fname, lines, mode):
655 676 if self.eolmode == 'auto':
656 677 eol = self.eol
657 678 elif self.eolmode == 'crlf':
658 679 eol = '\r\n'
659 680 else:
660 681 eol = '\n'
661 682
662 683 if self.eolmode != 'strict' and eol and eol != '\n':
663 684 rawlines = []
664 685 for l in lines:
665 686 if l and l[-1] == '\n':
666 687 l = l[:-1] + eol
667 688 rawlines.append(l)
668 689 lines = rawlines
669 690
670 691 self.backend.setfile(fname, ''.join(lines), mode, self.copysource)
671 692
672 693 def printfile(self, warn):
673 694 if self.fileprinted:
674 695 return
675 696 if warn or self.ui.verbose:
676 697 self.fileprinted = True
677 698 s = _("patching file %s\n") % self.fname
678 699 if warn:
679 700 self.ui.warn(s)
680 701 else:
681 702 self.ui.note(s)
682 703
683 704
684 705 def findlines(self, l, linenum):
685 706 # looks through the hash and finds candidate lines. The
686 707 # result is a list of line numbers sorted based on distance
687 708 # from linenum
688 709
689 710 cand = self.hash.get(l, [])
690 711 if len(cand) > 1:
691 712 # resort our list of potentials forward then back.
692 713 cand.sort(key=lambda x: abs(x - linenum))
693 714 return cand
694 715
695 716 def write_rej(self):
696 717 # our rejects are a little different from patch(1). This always
697 718 # creates rejects in the same form as the original patch. A file
698 719 # header is inserted so that you can run the reject through patch again
699 720 # without having to type the filename.
700 721 if not self.rej:
701 722 return
702 723 base = os.path.basename(self.fname)
703 724 lines = ["--- %s\n+++ %s\n" % (base, base)]
704 725 for x in self.rej:
705 726 for l in x.hunk:
706 727 lines.append(l)
707 728 if l[-1] != '\n':
708 729 lines.append("\n\ No newline at end of file\n")
709 730 self.backend.writerej(self.fname, len(self.rej), self.hunks, lines)
710 731
711 732 def apply(self, h):
712 733 if not h.complete():
713 734 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
714 735 (h.number, h.desc, len(h.a), h.lena, len(h.b),
715 736 h.lenb))
716 737
717 738 self.hunks += 1
718 739
719 740 if self.missing:
720 741 self.rej.append(h)
721 742 return -1
722 743
723 744 if self.exists and self.create:
724 745 if self.copysource:
725 746 self.ui.warn(_("cannot create %s: destination already "
726 747 "exists\n") % self.fname)
727 748 else:
728 749 self.ui.warn(_("file %s already exists\n") % self.fname)
729 750 self.rej.append(h)
730 751 return -1
731 752
732 753 if isinstance(h, binhunk):
733 754 if self.remove:
734 755 self.backend.unlink(self.fname)
735 756 else:
736 757 l = h.new(self.lines)
737 758 self.lines[:] = l
738 759 self.offset += len(l)
739 760 self.dirty = True
740 761 return 0
741 762
742 763 horig = h
743 764 if (self.eolmode in ('crlf', 'lf')
744 765 or self.eolmode == 'auto' and self.eol):
745 766 # If new eols are going to be normalized, then normalize
746 767 # hunk data before patching. Otherwise, preserve input
747 768 # line-endings.
748 769 h = h.getnormalized()
749 770
750 771 # fast case first, no offsets, no fuzz
751 772 old, oldstart, new, newstart = h.fuzzit(0, False)
752 773 oldstart += self.offset
753 774 orig_start = oldstart
754 775 # if there's skew we want to emit the "(offset %d lines)" even
755 776 # when the hunk cleanly applies at start + skew, so skip the
756 777 # fast case code
757 778 if (self.skew == 0 and
758 779 diffhelpers.testhunk(old, self.lines, oldstart) == 0):
759 780 if self.remove:
760 781 self.backend.unlink(self.fname)
761 782 else:
762 783 self.lines[oldstart:oldstart + len(old)] = new
763 784 self.offset += len(new) - len(old)
764 785 self.dirty = True
765 786 return 0
766 787
767 788 # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
768 789 self.hash = {}
769 790 for x, s in enumerate(self.lines):
770 791 self.hash.setdefault(s, []).append(x)
771 792
772 793 for fuzzlen in xrange(self.ui.configint("patch", "fuzz", 2) + 1):
773 794 for toponly in [True, False]:
774 795 old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
775 796 oldstart = oldstart + self.offset + self.skew
776 797 oldstart = min(oldstart, len(self.lines))
777 798 if old:
778 799 cand = self.findlines(old[0][1:], oldstart)
779 800 else:
780 801 # Only adding lines with no or fuzzed context, just
781 802 # take the skew in account
782 803 cand = [oldstart]
783 804
784 805 for l in cand:
785 806 if not old or diffhelpers.testhunk(old, self.lines, l) == 0:
786 807 self.lines[l : l + len(old)] = new
787 808 self.offset += len(new) - len(old)
788 809 self.skew = l - orig_start
789 810 self.dirty = True
790 811 offset = l - orig_start - fuzzlen
791 812 if fuzzlen:
792 813 msg = _("Hunk #%d succeeded at %d "
793 814 "with fuzz %d "
794 815 "(offset %d lines).\n")
795 816 self.printfile(True)
796 817 self.ui.warn(msg %
797 818 (h.number, l + 1, fuzzlen, offset))
798 819 else:
799 820 msg = _("Hunk #%d succeeded at %d "
800 821 "(offset %d lines).\n")
801 822 self.ui.note(msg % (h.number, l + 1, offset))
802 823 return fuzzlen
803 824 self.printfile(True)
804 825 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
805 826 self.rej.append(horig)
806 827 return -1
807 828
808 829 def close(self):
809 830 if self.dirty:
810 831 self.writelines(self.fname, self.lines, self.mode)
811 832 self.write_rej()
812 833 return len(self.rej)
813 834
814 835 class header(object):
815 836 """patch header
816 837 """
817 838 diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
818 839 diff_re = re.compile('diff -r .* (.*)$')
819 840 allhunks_re = re.compile('(?:index|deleted file) ')
820 841 pretty_re = re.compile('(?:new file|deleted file) ')
821 842 special_re = re.compile('(?:index|deleted|copy|rename) ')
822 843 newfile_re = re.compile('(?:new file)')
823 844
824 845 def __init__(self, header):
825 846 self.header = header
826 847 self.hunks = []
827 848
828 849 def binary(self):
829 850 return any(h.startswith('index ') for h in self.header)
830 851
831 852 def pretty(self, fp):
832 853 for h in self.header:
833 854 if h.startswith('index '):
834 855 fp.write(_('this modifies a binary file (all or nothing)\n'))
835 856 break
836 857 if self.pretty_re.match(h):
837 858 fp.write(h)
838 859 if self.binary():
839 860 fp.write(_('this is a binary file\n'))
840 861 break
841 862 if h.startswith('---'):
842 863 fp.write(_('%d hunks, %d lines changed\n') %
843 864 (len(self.hunks),
844 865 sum([max(h.added, h.removed) for h in self.hunks])))
845 866 break
846 867 fp.write(h)
847 868
848 869 def write(self, fp):
849 870 fp.write(''.join(self.header))
850 871
851 872 def allhunks(self):
852 873 return any(self.allhunks_re.match(h) for h in self.header)
853 874
854 875 def files(self):
855 876 match = self.diffgit_re.match(self.header[0])
856 877 if match:
857 878 fromfile, tofile = match.groups()
858 879 if fromfile == tofile:
859 880 return [fromfile]
860 881 return [fromfile, tofile]
861 882 else:
862 883 return self.diff_re.match(self.header[0]).groups()
863 884
864 885 def filename(self):
865 886 return self.files()[-1]
866 887
867 888 def __repr__(self):
868 889 return '<header %s>' % (' '.join(map(repr, self.files())))
869 890
870 891 def isnewfile(self):
871 892 return any(self.newfile_re.match(h) for h in self.header)
872 893
873 894 def special(self):
874 895 # Special files are shown only at the header level and not at the hunk
875 896 # level for example a file that has been deleted is a special file.
876 897 # The user cannot change the content of the operation, in the case of
877 898 # the deleted file he has to take the deletion or not take it, he
878 899 # cannot take some of it.
879 900 # Newly added files are special if they are empty, they are not special
880 901 # if they have some content as we want to be able to change it
881 902 nocontent = len(self.header) == 2
882 903 emptynewfile = self.isnewfile() and nocontent
883 904 return emptynewfile or \
884 905 any(self.special_re.match(h) for h in self.header)
885 906
886 907 class recordhunk(object):
887 908 """patch hunk
888 909
889 910 XXX shouldn't we merge this with the other hunk class?
890 911 """
891 912 maxcontext = 3
892 913
893 914 def __init__(self, header, fromline, toline, proc, before, hunk, after):
894 915 def trimcontext(number, lines):
895 916 delta = len(lines) - self.maxcontext
896 917 if False and delta > 0:
897 918 return number + delta, lines[:self.maxcontext]
898 919 return number, lines
899 920
900 921 self.header = header
901 922 self.fromline, self.before = trimcontext(fromline, before)
902 923 self.toline, self.after = trimcontext(toline, after)
903 924 self.proc = proc
904 925 self.hunk = hunk
905 926 self.added, self.removed = self.countchanges(self.hunk)
906 927
907 928 def __eq__(self, v):
908 929 if not isinstance(v, recordhunk):
909 930 return False
910 931
911 932 return ((v.hunk == self.hunk) and
912 933 (v.proc == self.proc) and
913 934 (self.fromline == v.fromline) and
914 935 (self.header.files() == v.header.files()))
915 936
916 937 def __hash__(self):
917 938 return hash((tuple(self.hunk),
918 939 tuple(self.header.files()),
919 940 self.fromline,
920 941 self.proc))
921 942
922 943 def countchanges(self, hunk):
923 944 """hunk -> (n+,n-)"""
924 945 add = len([h for h in hunk if h[0] == '+'])
925 946 rem = len([h for h in hunk if h[0] == '-'])
926 947 return add, rem
927 948
928 949 def write(self, fp):
929 950 delta = len(self.before) + len(self.after)
930 951 if self.after and self.after[-1] == '\\ No newline at end of file\n':
931 952 delta -= 1
932 953 fromlen = delta + self.removed
933 954 tolen = delta + self.added
934 955 fp.write('@@ -%d,%d +%d,%d @@%s\n' %
935 956 (self.fromline, fromlen, self.toline, tolen,
936 957 self.proc and (' ' + self.proc)))
937 958 fp.write(''.join(self.before + self.hunk + self.after))
938 959
939 960 pretty = write
940 961
941 962 def filename(self):
942 963 return self.header.filename()
943 964
944 965 def __repr__(self):
945 966 return '<hunk %r@%d>' % (self.filename(), self.fromline)
946 967
def filterpatch(ui, headers, operation=None):
    """Interactively filter patch chunks into applied-only chunks

    headers is a list of header objects (as produced by parsepatch).
    Each file and then each hunk is shown to the user, who answers via
    ui.promptchoice.  Returns a flat list mixing the accepted headers and
    their accepted hunks; headers with no accepted hunks are dropped
    unless the header is special().  'operation' is the verb used in
    prompts (defaults to 'record').
    """
    if operation is None:
        operation = _('record')

    def prompt(skipfile, skipall, query, chunk):
        """prompt query, and process base inputs

        - y/n for the rest of file
        - y/n for the rest
        - ? (help)
        - q (quit)

        Return True/False and possibly updated skipfile and skipall.
        """
        newpatches = None
        # an earlier 'all'/'done' or per-file answer short-circuits the prompt
        if skipall is not None:
            return skipall, skipfile, skipall, newpatches
        if skipfile is not None:
            return skipfile, skipfile, skipall, newpatches
        while True:
            resps = _('[Ynesfdaq?]'
                      '$$ &Yes, record this change'
                      '$$ &No, skip this change'
                      '$$ &Edit this change manually'
                      '$$ &Skip remaining changes to this file'
                      '$$ Record remaining changes to this &file'
                      '$$ &Done, skip remaining changes and files'
                      '$$ Record &all changes to all remaining files'
                      '$$ &Quit, recording no changes'
                      '$$ &? (display help)')
            r = ui.promptchoice("%s %s" % (query, resps))
            ui.write("\n")
            if r == 8: # ?
                for c, t in ui.extractchoices(resps)[1]:
                    ui.write('%s - %s\n' % (c, t.lower()))
                continue
            elif r == 0: # yes
                ret = True
            elif r == 1: # no
                ret = False
            elif r == 2: # Edit patch
                if chunk is None:
                    ui.write(_('cannot edit patch for whole file'))
                    ui.write("\n")
                    continue
                if chunk.header.binary():
                    ui.write(_('cannot edit patch for binary file'))
                    ui.write("\n")
                    continue
                # Patch comment based on the Git one (based on comment at end of
                # https://mercurial-scm.org/wiki/RecordExtension)
                phelp = '---' + _("""
To remove '-' lines, make them ' ' lines (context).
To remove '+' lines, delete them.
Lines starting with # will be removed from the patch.

If the patch applies cleanly, the edited hunk will immediately be
added to the record list. If it does not apply cleanly, a rejects
file will be generated: you can use that when you try again. If
all lines of the hunk are removed, then the edit is aborted and
the hunk is left unchanged.
""")
                (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-",
                                                      suffix=".diff", text=True)
                ncpatchfp = None
                try:
                    # Write the initial patch
                    f = os.fdopen(patchfd, "w")
                    chunk.header.write(f)
                    chunk.write(f)
                    f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
                    f.close()
                    # Start the editor and wait for it to complete
                    editor = ui.geteditor()
                    ret = ui.system("%s \"%s\"" % (editor, patchfn),
                                    environ={'HGUSER': ui.username()})
                    if ret != 0:
                        ui.warn(_("editor exited with exit code %d\n") % ret)
                        continue
                    # Remove comment lines
                    patchfp = open(patchfn)
                    ncpatchfp = cStringIO.StringIO()
                    for line in patchfp:
                        if not line.startswith('#'):
                            ncpatchfp.write(line)
                    patchfp.close()
                    ncpatchfp.seek(0)
                    newpatches = parsepatch(ncpatchfp)
                finally:
                    os.unlink(patchfn)
                    del ncpatchfp
                # Signal that the chunk shouldn't be applied as-is, but
                # provide the new patch to be used instead.
                ret = False
            elif r == 3: # Skip
                ret = skipfile = False
            elif r == 4: # file (Record remaining)
                ret = skipfile = True
            elif r == 5: # done, skip remaining
                ret = skipall = False
            elif r == 6: # all
                ret = skipall = True
            elif r == 7: # quit
                raise util.Abort(_('user quit'))
            return ret, skipfile, skipall, newpatches

    seen = set()
    applied = {}        # 'filename' -> [] of chunks
    skipfile, skipall = None, None
    # pos is the 1-based index of the next hunk to display, for 'x/y' prompts
    pos, total = 1, sum(len(h.hunks) for h in headers)
    for h in headers:
        pos += len(h.hunks)
        skipfile = None
        # fixoffset tracks the line drift caused by hunks skipped in this file
        fixoffset = 0
        hdr = ''.join(h.header)
        # identical header text means we already asked about this file
        if hdr in seen:
            continue
        seen.add(hdr)
        if skipall is None:
            h.pretty(ui)
        msg = (_('examine changes to %s?') %
               _(' and ').join("'%s'" % f for f in h.files()))
        r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
        if not r:
            continue
        applied[h.filename()] = [h]
        if h.allhunks():
            # binary/rename-style headers carry all their hunks implicitly
            applied[h.filename()] += h.hunks
            continue
        for i, chunk in enumerate(h.hunks):
            if skipfile is None and skipall is None:
                chunk.pretty(ui)
            if total == 1:
                msg = _("record this change to '%s'?") % chunk.filename()
            else:
                idx = pos - len(h.hunks) + i
                msg = _("record change %d/%d to '%s'?") % (idx, total,
                                                           chunk.filename())
            r, skipfile, skipall, newpatches = prompt(skipfile,
                                                      skipall, msg, chunk)
            if r:
                if fixoffset:
                    # copy before mutating: the original hunk may be reused
                    chunk = copy.copy(chunk)
                    chunk.toline += fixoffset
                applied[chunk.filename()].append(chunk)
            elif newpatches is not None:
                # user edited the hunk: record the replacement hunks instead
                for newpatch in newpatches:
                    for newhunk in newpatch.hunks:
                        if fixoffset:
                            newhunk.toline += fixoffset
                        applied[newhunk.filename()].append(newhunk)
            else:
                fixoffset += chunk.removed - chunk.added
    return sum([h for h in applied.itervalues()
                if h[0].special() or len(h) > 1], [])
class hunk(object):
    """A single hunk parsed from a unified or context diff.

    Keeps the raw hunk lines in self.hunk, the old-side lines in self.a,
    the new-side lines (without markers) in self.b, and the start/length
    of both ranges (starta/lena, startb/lenb).
    """
    def __init__(self, desc, num, lr, context):
        # desc: the '@@ ...' (unified) or '***...' (context) range line
        # num: 1-based hunk number, used in error messages
        # lr: linereader to parse the hunk body from; None builds an
        #     empty shell (see getnormalized)
        # context: True when parsing a context-diff hunk
        self.number = num
        self.desc = desc
        self.hunk = [desc]
        self.a = []
        self.b = []
        self.starta = self.lena = None
        self.startb = self.lenb = None
        if lr is not None:
            if context:
                self.read_context_hunk(lr)
            else:
                self.read_unified_hunk(lr)

    def getnormalized(self):
        """Return a copy with line endings normalized to LF."""

        def normalize(lines):
            nlines = []
            for line in lines:
                if line.endswith('\r\n'):
                    line = line[:-2] + '\n'
                nlines.append(line)
            return nlines

        # Dummy object, it is rebuilt manually
        nh = hunk(self.desc, self.number, None, None)
        nh.number = self.number
        nh.desc = self.desc
        nh.hunk = self.hunk
        nh.a = normalize(self.a)
        nh.b = normalize(self.b)
        nh.starta = self.starta
        nh.startb = self.startb
        nh.lena = self.lena
        nh.lenb = self.lenb
        return nh

    def read_unified_hunk(self, lr):
        """Parse a unified-diff hunk body following self.desc from lr."""
        m = unidesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, self.lena, self.startb, self.lenb = m.groups()
        # a missing length defaults to 1, e.g. '@@ -1 +1 @@'
        if self.lena is None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb is None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a,
                             self.b)
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length. Lets try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1
        self._fixnewline(lr)

    def read_context_hunk(self, lr):
        """Parse a context-diff hunk (old block, then new block) from lr."""
        self.desc = lr.readline()
        m = contextdesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, aend = m.groups()
        self.starta = int(self.starta)
        if aend is None:
            aend = self.starta
        self.lena = int(aend) - self.starta
        if self.starta:
            self.lena += 1
        # old block: '- ' removals, '! ' changes, '  ' context
        for x in xrange(self.lena):
            l = lr.readline()
            if l.startswith('---'):
                # lines addition, old block is empty
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('- ') or l.startswith('! '):
                u = '-' + s
            elif l.startswith('  '):
                u = ' ' + s
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.a.append(u)
            self.hunk.append(u)

        l = lr.readline()
        if l.startswith('\ '):
            # no-eol marker: strip the newline from the previous line
            s = self.a[-1][:-1]
            self.a[-1] = s
            self.hunk[-1] = s
            l = lr.readline()
        m = contextdesc.match(l)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.startb, bend = m.groups()
        self.startb = int(self.startb)
        if bend is None:
            bend = self.startb
        self.lenb = int(bend) - self.startb
        if self.startb:
            self.lenb += 1
        # new block: '+ ' additions, '! ' changes, '  ' context; merge the
        # new lines into self.hunk at the right position (hunki cursor)
        hunki = 1
        for x in xrange(self.lenb):
            l = lr.readline()
            if l.startswith('\ '):
                # XXX: the only way to hit this is with an invalid line range.
                # The no-eol marker is not counted in the line range, but I
                # guess there are diff(1) out there which behave differently.
                s = self.b[-1][:-1]
                self.b[-1] = s
                self.hunk[hunki - 1] = s
                continue
            if not l:
                # line deletions, new block is empty and we hit EOF
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('+ ') or l.startswith('! '):
                u = '+' + s
            elif l.startswith('  '):
                u = ' ' + s
            elif len(self.b) == 0:
                # line deletions, new block is empty
                lr.push(l)
                break
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.b.append(s)
            while True:
                if hunki >= len(self.hunk):
                    h = ""
                else:
                    h = self.hunk[hunki]
                hunki += 1
                if h == u:
                    break
                elif h.startswith('-'):
                    continue
                else:
                    self.hunk.insert(hunki - 1, u)
                    break

        if not self.a:
            # this happens when lines were only added to the hunk
            for x in self.hunk:
                if x.startswith('-') or x.startswith(' '):
                    self.a.append(x)
        if not self.b:
            # this happens when lines were only deleted from the hunk
            for x in self.hunk:
                if x.startswith('+') or x.startswith(' '):
                    self.b.append(x[1:])
        # @@ -start,len +start,len @@
        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
                                               self.startb, self.lenb)
        self.hunk[0] = self.desc
        self._fixnewline(lr)

    def _fixnewline(self, lr):
        """Consume a trailing no-newline-at-eof marker line, if present."""
        l = lr.readline()
        if l.startswith('\ '):
            diffhelpers.fix_newline(self.hunk, self.a, self.b)
        else:
            lr.push(l)

    def complete(self):
        """True when both sides have as many lines as their ranges claim."""
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def _fuzzit(self, old, new, fuzz, toponly):
        # this removes context lines from the top and bottom of list 'l'. It
        # checks the hunk to make sure only context lines are removed, and then
        # returns a new shortened list of lines.
        fuzz = min(fuzz, len(old))
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            for x in xrange(hlen - 1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x + 1][0] == ' ':
                    top += 1
                else:
                    break
            if not toponly:
                for x in xrange(hlen - 1):
                    if self.hunk[hlen - bot - 1][0] == ' ':
                        bot += 1
                    else:
                        break

            bot = min(fuzz, bot)
            top = min(fuzz, top)
            return old[top:len(old) - bot], new[top:len(new) - bot], top
        return old, new, 0

    def fuzzit(self, fuzz, toponly):
        """Return (old, oldstart, new, newstart) with fuzz context removed."""
        old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
        oldstart = self.starta + top
        newstart = self.startb + top
        # zero length hunk ranges already have their start decremented
        if self.lena and oldstart > 0:
            oldstart -= 1
        if self.lenb and newstart > 0:
            newstart -= 1
        return old, oldstart, new, newstart
1319 1340
class binhunk(object):
    """A 'GIT binary patch' hunk, either 'literal' or 'delta' encoded."""
    def __init__(self, lr, fname):
        # decoded payload; stays None until _read() succeeds
        self.text = None
        # True for 'delta' hunks, which are applied against the old content
        self.delta = False
        self.hunk = ['GIT binary patch\n']
        self._fname = fname
        self._read(lr)

    def complete(self):
        """True once the binary payload has been decoded."""
        return self.text is not None

    def new(self, lines):
        """Return the new file content as a single-element list.

        'lines' is the old file content; it is only consulted for delta
        hunks, where the payload is a delta against it.
        """
        if self.delta:
            return [applybindelta(self.text, ''.join(lines))]
        return [self.text]

    def _read(self, lr):
        """Parse and decode the base85+zlib payload from linereader lr."""
        def getline(lr, hunk):
            # read one line, keep a raw copy in hunk, return it without EOL
            l = lr.readline()
            hunk.append(l)
            return l.rstrip('\r\n')

        size = 0
        # scan for the 'literal <size>' or 'delta <size>' header line
        while True:
            line = getline(lr, self.hunk)
            if not line:
                raise PatchError(_('could not extract "%s" binary data')
                                 % self._fname)
            if line.startswith('literal '):
                size = int(line[8:].rstrip())
                break
            if line.startswith('delta '):
                size = int(line[6:].rstrip())
                self.delta = True
                break
        dec = []
        line = getline(lr, self.hunk)
        while len(line) > 1:
            # first char encodes this line's decoded length: A-Z -> 1-26,
            # a-z -> 27-52
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            try:
                dec.append(base85.b85decode(line[1:])[:l])
            except ValueError as e:
                raise PatchError(_('could not decode "%s" binary patch: %s')
                                 % (self._fname, str(e)))
            line = getline(lr, self.hunk)
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            raise PatchError(_('"%s" length is %d bytes, should be %d')
                             % (self._fname, len(text), size))
        self.text = text
1375 1396
def parsefilename(str):
    """Extract the file name from a '--- name<tab-or-space>junk' diff line.

    Drops the 4-character '--- '/'+++ ' prefix and the trailing newline,
    then cuts at the first tab, or at the first space when there is no
    tab; the whole remainder is the name if neither is present.
    """
    name = str[4:].rstrip('\r\n')
    for sep in ('\t', ' '):
        cut = name.find(sep)
        if cut >= 0:
            return name[:cut]
    return name
1385 1406
def reversehunks(hunks):
    '''reverse the signs in the hunks given as argument

    This function operates on hunks coming out of patch.filterpatch, that is
    a list of the form: [header1, hunk1, hunk2, header2...]. Example usage:

    >>> rawpatch = """diff --git a/folder1/g b/folder1/g
    ... --- a/folder1/g
    ... +++ b/folder1/g
    ... @@ -1,7 +1,7 @@
    ... +firstline
    ...  c
    ...  1
    ...  2
    ... + 3
    ... -4
    ...  5
    ...  d
    ... +lastline"""
    >>> hunks = parsepatch(rawpatch)
    >>> hunkscomingfromfilterpatch = []
    >>> for h in hunks:
    ...     hunkscomingfromfilterpatch.append(h)
    ...     hunkscomingfromfilterpatch.extend(h.hunks)

    >>> reversedhunks = reversehunks(hunkscomingfromfilterpatch)
    >>> fp = cStringIO.StringIO()
    >>> for c in reversedhunks:
    ...      c.write(fp)
    >>> fp.seek(0)
    >>> reversedpatch = fp.read()
    >>> print reversedpatch
    diff --git a/folder1/g b/folder1/g
    --- a/folder1/g
    +++ b/folder1/g
    @@ -1,4 +1,3 @@
    -firstline
     c
     1
     2
    @@ -1,6 +2,6 @@
     c
     1
     2
    - 3
    +4
     5
     d
    @@ -5,3 +6,2 @@
     5
     d
    -lastline

    '''

    import crecord as crecordmod
    reversed_ = []
    for chunk in hunks:
        if isinstance(chunk, crecordmod.uihunk):
            # curses hunks encapsulate the record hunk in _hunk
            chunk = chunk._hunk
        if isinstance(chunk, recordhunk):
            # flip each line marker and swap the add/remove counters
            for idx, line in enumerate(chunk.hunk):
                if line.startswith("-"):
                    chunk.hunk[idx] = "+" + chunk.hunk[idx][1:]
                elif line.startswith("+"):
                    chunk.hunk[idx] = "-" + chunk.hunk[idx][1:]
            chunk.added, chunk.removed = chunk.removed, chunk.added
        reversed_.append(chunk)
    return reversed_
1456 1477
def parsepatch(originalchunks):
    """patch -> [] of headers -> [] of hunks

    Feeds scanpatch() events through a small state machine and returns
    the resulting list of header objects, each carrying its recordhunks.
    Raises PatchError on an event sequence the table does not allow.
    """
    class parser(object):
        """patch parsing state machine"""
        def __init__(self):
            self.fromline = 0
            self.toline = 0
            self.proc = ''
            self.header = None
            self.context = []
            self.before = []
            self.hunk = []
            self.headers = []

        def addrange(self, limits):
            # record the numbers from an '@@ -a,b +c,d @@' range line
            fromstart, fromend, tostart, toend, proc = limits
            self.fromline = int(fromstart)
            self.toline = int(tostart)
            self.proc = proc

        def addcontext(self, context):
            # flush the pending hunk, if any; 'context' trails that hunk
            if self.hunk:
                h = recordhunk(self.header, self.fromline, self.toline,
                               self.proc, self.before, self.hunk, context)
                self.header.hunks.append(h)
                self.fromline += len(self.before) + h.removed
                self.toline += len(self.before) + h.added
                self.before = []
                self.hunk = []
                self.proc = ''
            self.context = context

        def addhunk(self, hunk):
            # the context seen just before a hunk is its leading context
            if self.context:
                self.before = self.context
                self.context = []
            self.hunk = hunk

        def newfile(self, hdr):
            self.addcontext([])
            h = header(hdr)
            self.headers.append(h)
            self.header = h

        def addother(self, line):
            pass # 'other' lines are ignored

        def finished(self):
            self.addcontext([])
            return self.headers

        # state -> {event -> handler}; a missing entry is a parse error
        transitions = {
            'file': {'context': addcontext,
                     'file': newfile,
                     'hunk': addhunk,
                     'range': addrange},
            'context': {'file': newfile,
                        'hunk': addhunk,
                        'range': addrange,
                        'other': addother},
            'hunk': {'context': addcontext,
                     'file': newfile,
                     'range': addrange},
            'range': {'context': addcontext,
                      'hunk': addhunk},
            'other': {'other': addother},
        }

    p = parser()
    fp = cStringIO.StringIO()
    fp.write(''.join(originalchunks))
    fp.seek(0)

    state = 'context'
    for newstate, data in scanpatch(fp):
        try:
            # the table holds plain functions, so pass the parser explicitly
            p.transitions[state][newstate](p, data)
        except KeyError:
            raise PatchError('unhandled transition: %s -> %s' %
                             (state, newstate))
        state = newstate
    del fp
    return p.finished()
1540 1561
def pathtransform(path, strip, prefix):
    '''turn a path from a patch into a path suitable for the repository

    prefix, if not empty, is expected to be normalized with a / at the end.

    Returns (stripped components, path in repository).

    >>> pathtransform('a/b/c', 0, '')
    ('', 'a/b/c')
    >>> pathtransform(' a/b/c ', 0, '')
    ('', ' a/b/c')
    >>> pathtransform(' a/b/c ', 2, '')
    ('a/b/', 'c')
    >>> pathtransform('a/b/c', 0, 'd/e/')
    ('', 'd/e/a/b/c')
    >>> pathtransform(' a//b/c ', 2, 'd/e/')
    ('a//b/', 'd/e/c')
    >>> pathtransform('a/b/c', 3, '')
    Traceback (most recent call last):
    PatchError: unable to strip away 1 of 3 dirs from a/b/c
    '''
    if strip == 0:
        return '', prefix + path.rstrip()
    pos = 0
    last = len(path) - 1
    remaining = strip
    while remaining > 0:
        pos = path.find('/', pos)
        if pos == -1:
            raise PatchError(_("unable to strip away %d of %d dirs from %s") %
                             (remaining, strip, path))
        pos += 1
        # consume '//' in the path
        while pos < last and path[pos] == '/':
            pos += 1
        remaining -= 1
    return path[:pos].lstrip(), prefix + path[pos:].rstrip()
1578 1599
def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix):
    """Build a patchmeta for a plain (non-git) patch hunk.

    Decides which repository file the hunk targets, choosing between the
    '---' name (afile_orig) and the '+++' name (bfile_orig), and whether
    the patch creates ('ADD') or deletes ('DELETE') it.  Raises
    PatchError when neither side yields a usable file name.
    """
    nulla = afile_orig == "/dev/null"
    nullb = bfile_orig == "/dev/null"
    # a zero-length range on the null side marks file creation/deletion
    create = nulla and hunk.starta == 0 and hunk.lena == 0
    remove = nullb and hunk.startb == 0 and hunk.lenb == 0
    abase, afile = pathtransform(afile_orig, strip, prefix)
    gooda = not nulla and backend.exists(afile)
    bbase, bfile = pathtransform(bfile_orig, strip, prefix)
    if afile == bfile:
        goodb = gooda
    else:
        goodb = not nullb and backend.exists(bfile)
    missing = not goodb and not gooda and not create

    # some diff programs apparently produce patches where the afile is
    # not /dev/null, but afile starts with bfile
    abasedir = afile[:afile.rfind('/') + 1]
    bbasedir = bfile[:bfile.rfind('/') + 1]
    if (missing and abasedir == bbasedir and afile.startswith(bfile)
        and hunk.starta == 0 and hunk.lena == 0):
        create = True
        missing = False

    # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
    # diff is between a file and its backup. In this case, the original
    # file should be patched (see original mpatch code).
    isbackup = (abase == bbase and bfile.startswith(afile))
    fname = None
    if not missing:
        if gooda and goodb:
            if isbackup:
                fname = afile
            else:
                fname = bfile
        elif gooda:
            fname = afile

    # fall back to whichever side is not /dev/null
    if not fname:
        if not nullb:
            if isbackup:
                fname = afile
            else:
                fname = bfile
        elif not nulla:
            fname = afile
        else:
            raise PatchError(_("undefined source and destination files"))

    gp = patchmeta(fname)
    if create:
        gp.op = 'ADD'
    elif remove:
        gp.op = 'DELETE'
    return gp
1633 1654
def scanpatch(fp):
    """like patch.iterhunks, but yield different events

    - ('file', [header_lines + fromfile + tofile])
    - ('context', [context_lines])
    - ('hunk', [hunk_lines])
    - ('range', (-start,len, +start,len, proc))
    """
    lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
    reader = linereader(fp)

    def consume(first, pred):
        """collect lines from reader while pred holds, starting with first"""
        collected = [first]
        while True:
            nextline = reader.readline()
            if not nextline:
                break
            if pred(nextline):
                collected.append(nextline)
            else:
                reader.push(nextline)
                break
        return collected

    while True:
        line = reader.readline()
        if not line:
            break
        if line.startswith('diff --git a/') or line.startswith('diff -r '):
            def notheader(line):
                parts = line.split(None, 1)
                return not parts or parts[0] not in ('---', 'diff')
            header = consume(line, notheader)
            fromfile = reader.readline()
            if fromfile.startswith('---'):
                tofile = reader.readline()
                header += [fromfile, tofile]
            else:
                reader.push(fromfile)
            yield 'file', header
        elif line[0] == ' ':
            yield 'context', consume(line, lambda l: l[0] in ' \\')
        elif line[0] in '-+':
            yield 'hunk', consume(line, lambda l: l[0] in '-+\\')
        else:
            m = lines_re.match(line)
            if m:
                yield 'range', m.groups()
            else:
                yield 'other', line
1685 1706
def scangitpatch(lr, firstline):
    """
    Git patches can emit:
    - rename a to b
    - change b
    - copy a to c
    - change c

    We cannot apply this sequence as-is, the renamed 'a' could not be
    found for it would have been renamed already. And we cannot copy
    from 'b' instead because 'b' would have been changed already. So
    we scan the git patch for copy and rename commands so we can
    perform the copies ahead of time.
    """
    pos = 0
    try:
        # remember where we are so the caller's stream can be rewound
        pos = lr.fp.tell()
        fp = lr.fp
    except IOError:
        # unseekable input: slurp it into a rewindable buffer instead
        fp = cStringIO.StringIO(lr.fp.read())
    metareader = linereader(fp)
    metareader.push(firstline)
    gitpatches = readgitpatch(metareader)
    fp.seek(pos)
    return gitpatches
1711 1732
def iterhunks(fp):
    """Read a patch and yield the following events:
    - ("file", afile, bfile, firsthunk): select a new target file.
    - ("hunk", hunk): a new hunk is ready to be applied, follows a
    "file" event.
    - ("git", gitchanges): current diff is in git format, gitchanges
    maps filenames to gitpatch records. Unique event.
    """
    afile = ""
    bfile = ""
    state = None
    hunknum = 0
    emitfile = newfile = False
    gitpatches = None

    # our states
    BFILE = 1
    # context: None = unknown yet, True = context diff, False = unified
    context = None
    lr = linereader(fp)

    while True:
        x = lr.readline()
        if not x:
            break
        if state == BFILE and (
            (not context and x[0] == '@')
            or (context is not False and x.startswith('***************'))
            or x.startswith('GIT binary patch')):
            # a hunk for the current file
            gp = None
            if (gitpatches and
                gitpatches[-1].ispatching(afile, bfile)):
                gp = gitpatches.pop()
            if x.startswith('GIT binary patch'):
                h = binhunk(lr, gp.path)
            else:
                if context is None and x.startswith('***************'):
                    context = True
                h = hunk(x, hunknum + 1, lr, context)
            hunknum += 1
            if emitfile:
                # first hunk for this file: announce the file first
                emitfile = False
                yield 'file', (afile, bfile, h, gp and gp.copy() or None)
            yield 'hunk', h
        elif x.startswith('diff --git a/'):
            m = gitre.match(x.rstrip(' \r\n'))
            if not m:
                continue
            if gitpatches is None:
                # scan whole input for git metadata
                gitpatches = scangitpatch(lr, x)
                yield 'git', [g.copy() for g in gitpatches
                              if g.op in ('COPY', 'RENAME')]
                gitpatches.reverse()
            afile = 'a/' + m.group(1)
            bfile = 'b/' + m.group(2)
            # flush metadata-only entries for files listed before this one
            while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
                gp = gitpatches.pop()
                yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
            if not gitpatches:
                raise PatchError(_('failed to synchronize metadata for "%s"')
                                 % afile[2:])
            gp = gitpatches[-1]
            newfile = True
        elif x.startswith('---'):
            # check for a unified diff
            l2 = lr.readline()
            if not l2.startswith('+++'):
                lr.push(l2)
                continue
            newfile = True
            context = False
            afile = parsefilename(x)
            bfile = parsefilename(l2)
        elif x.startswith('***'):
            # check for a context diff
            l2 = lr.readline()
            if not l2.startswith('---'):
                lr.push(l2)
                continue
            l3 = lr.readline()
            lr.push(l3)
            if not l3.startswith("***************"):
                lr.push(l2)
                continue
            newfile = True
            context = True
            afile = parsefilename(x)
            bfile = parsefilename(l2)

        if newfile:
            newfile = False
            emitfile = True
            state = BFILE
            hunknum = 0

    # remaining git metadata entries, e.g. mode changes without any hunk
    while gitpatches:
        gp = gitpatches.pop()
        yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
1810 1831
def applybindelta(binchunk, data):
    """Apply a git binary delta hunk ('binchunk') to the old content 'data'.

    Implements the same algorithm as git's patch-delta.c: after two
    variable-size length headers, the delta is a stream of copy commands
    (high bit set) and literal inserts.
    """
    def deltahead(binchunk):
        """length in bytes of the variable-size integer heading binchunk"""
        consumed = 0
        for c in binchunk:
            consumed += 1
            if not (ord(c) & 0x80):
                break
        return consumed
    # skip the two size headers (source and result lengths)
    binchunk = binchunk[deltahead(binchunk):]
    binchunk = binchunk[deltahead(binchunk):]
    out = ""
    pos = 0
    end = len(binchunk)
    while pos < end:
        cmd = ord(binchunk[pos])
        pos += 1
        if cmd & 0x80:
            # copy command: one optional byte per set flag bit encodes the
            # source offset (bits 0-3) and the length (bits 4-6), little end
            # first
            offset = 0
            size = 0
            for bit, shift in ((0x01, 0), (0x02, 8), (0x04, 16), (0x08, 24)):
                if cmd & bit:
                    offset |= ord(binchunk[pos]) << shift
                    pos += 1
            for bit, shift in ((0x10, 0), (0x20, 8), (0x40, 16)):
                if cmd & bit:
                    size |= ord(binchunk[pos]) << shift
                    pos += 1
            # an encoded size of zero means the maximum copy length
            if size == 0:
                size = 0x10000
            out += data[offset:offset + size]
        elif cmd != 0:
            # literal insert: the next 'cmd' bytes come from the delta itself
            out += binchunk[pos:pos + cmd]
            pos += cmd
        else:
            raise PatchError(_('unexpected delta opcode 0'))
    return out
1866 1887
def applydiff(ui, fp, backend, store, strip=1, prefix='', eolmode='strict'):
    """Reads a patch from fp and tries to apply it.

    Returns 0 for a clean patch, -1 if any rejects were found and 1 if
    there was any fuzz.

    If 'eolmode' is 'strict', the patch content and patched file are
    read in binary mode. Otherwise, line endings are ignored when
    patching then normalized according to 'eolmode'.
    """
    # thin wrapper: _applydiff does the work, with the default patchfile
    # class performing the per-file patching
    return _applydiff(ui, fp, patchfile, backend, store, strip=strip,
                      prefix=prefix, eolmode=eolmode)
1879 1900
def _applydiff(ui, fp, patcher, backend, store, strip=1, prefix='',
               eolmode='strict'):
    """Apply the patch read from fp, using 'patcher' for each file.

    Drives iterhunks(): feeds hunk events into the current file's
    patcher, opens a new patcher on 'file' events, and preloads
    copy/rename sources into 'store' on the 'git' event.  Returns 0 on
    success, 1 if there was fuzz, -1 if any hunks were rejected.
    """
    if prefix:
        prefix = pathutil.canonpath(backend.repo.root, backend.repo.getcwd(),
                                    prefix)
        if prefix != '':
            prefix += '/'
    def pstrip(p):
        # strip-1 because the leading 'a/'/'b/' was already removed
        return pathtransform(p, strip - 1, prefix)[1]

    rejects = 0
    err = 0
    current_file = None

    for state, values in iterhunks(fp):
        if state == 'hunk':
            if not current_file:
                continue
            ret = current_file.apply(values)
            if ret > 0:
                err = 1
        elif state == 'file':
            if current_file:
                rejects += current_file.close()
                current_file = None
            afile, bfile, first_hunk, gp = values
            if gp:
                gp.path = pstrip(gp.path)
                if gp.oldpath:
                    gp.oldpath = pstrip(gp.oldpath)
            else:
                # non-git patch: derive the metadata from the file names
                gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
                                   prefix)
            if gp.op == 'RENAME':
                backend.unlink(gp.oldpath)
            if not first_hunk:
                # metadata-only entry: create/delete/copy without hunks
                if gp.op == 'DELETE':
                    backend.unlink(gp.path)
                    continue
                data, mode = None, None
                if gp.op in ('RENAME', 'COPY'):
                    data, mode = store.getfile(gp.oldpath)[:2]
                    # FIXME: failing getfile has never been handled here
                    assert data is not None
                if gp.mode:
                    mode = gp.mode
                if gp.op == 'ADD':
                    # Added files without content have no hunk and
                    # must be created
                    data = ''
                if data or mode:
                    if (gp.op in ('ADD', 'RENAME', 'COPY')
                        and backend.exists(gp.path)):
                        raise PatchError(_("cannot create %s: destination "
                                           "already exists") % gp.path)
                    backend.setfile(gp.path, data, mode, gp.oldpath)
                continue
            try:
                current_file = patcher(ui, gp, backend, store,
                                       eolmode=eolmode)
            except PatchError as inst:
                ui.warn(str(inst) + '\n')
                current_file = None
                rejects += 1
                continue
        elif state == 'git':
            # preload copy/rename sources so later hunks see the old data
            for gp in values:
                path = pstrip(gp.oldpath)
                data, mode = backend.getfile(path)
                if data is None:
                    # The error ignored here will trigger a getfile()
                    # error in a place more appropriate for error
                    # handling, and will not interrupt the patching
                    # process.
                    pass
                else:
                    store.setfile(path, data, mode)
        else:
            raise util.Abort(_('unsupported parser state: %s') % state)

    if current_file:
        rejects += current_file.close()

    if rejects:
        return -1
    return err
1967 1988
def _externalpatch(ui, repo, patcher, patchname, strip, files,
                   similarity):
    """use <patcher> to apply <patchname> to the working directory.

    'files' is a mutable set that collects the names of the patched files.
    Raises PatchError if the external command exits non-zero.
    Returns whether patch was applied with fuzz factor."""

    fuzz = False
    args = []
    # Initialize before parsing the command output: a 'with fuzz' or
    # 'FAILED' line appearing before any 'patching file' line would
    # otherwise raise UnboundLocalError on 'pf'/'printed_file'.
    pf = patchname
    printed_file = False
    cwd = repo.root
    if cwd:
        args.append('-d %s' % util.shellquote(cwd))
    fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
                                         util.shellquote(patchname)))
    try:
        for line in fp:
            line = line.rstrip()
            ui.note(line + '\n')
            if line.startswith('patching file '):
                pf = util.parsepatchoutput(line)
                printed_file = False
                files.add(pf)
            elif line.find('with fuzz') >= 0:
                fuzz = True
                # print the file name once before the first warning about it
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
            elif line.find('saving rejects to file') >= 0:
                ui.warn(line + '\n')
            elif line.find('FAILED') >= 0:
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
    finally:
        if files:
            scmutil.marktouched(repo, files, similarity)
    code = fp.close()
    if code:
        raise PatchError(_("patch command failed: %s") %
                         util.explainexit(code)[0])
    return fuzz
2009 2030
def patchbackend(ui, backend, patchobj, strip, prefix, files=None,
                 eolmode='strict'):
    """Apply 'patchobj' (a path or a file-like object) through 'backend'.

    'files' collects the names of touched files.  Raises PatchError if the
    patch did not apply cleanly; returns whether fuzz was needed.
    """
    if files is None:
        files = set()
    if eolmode is None:
        eolmode = ui.config('patch', 'eol', 'strict')
    eollower = eolmode.lower()
    if eollower not in eolmodes:
        raise util.Abort(_('unsupported line endings type: %s') % eolmode)
    eolmode = eollower

    store = filestore()
    try:
        fp = open(patchobj, 'rb')
    except TypeError:
        # not a path: assume patchobj is already a file-like object
        fp = patchobj
    try:
        ret = applydiff(ui, fp, backend, store, strip=strip, prefix=prefix,
                        eolmode=eolmode)
    finally:
        # only close file objects we opened ourselves
        if fp != patchobj:
            fp.close()
        files.update(backend.close())
        store.close()
    if ret < 0:
        raise PatchError(_('patch failed to apply'))
    return ret > 0
2036 2057
def internalpatch(ui, repo, patchobj, strip, prefix='', files=None,
                  eolmode='strict', similarity=0):
    """use builtin patch to apply <patchobj> to the working directory.
    returns whether patch was applied with fuzz factor."""
    return patchbackend(ui, workingbackend(ui, repo, similarity), patchobj,
                        strip, prefix, files, eolmode)
2043 2064
def patchrepo(ui, repo, ctx, store, patchobj, strip, prefix, files=None,
              eolmode='strict'):
    """Apply 'patchobj' on top of 'ctx', writing results into 'store'."""
    return patchbackend(ui, repobackend(ui, repo, ctx, store), patchobj,
                        strip, prefix, files, eolmode)
2048 2069
def patch(ui, repo, patchname, strip=1, prefix='', files=None, eolmode='strict',
          similarity=0):
    """Apply <patchname> to the working directory.

    'eolmode' specifies how end of lines should be handled. It can be:
    - 'strict': inputs are read in binary mode, EOLs are preserved
    - 'crlf': EOLs are ignored when patching and reset to CRLF
    - 'lf': EOLs are ignored when patching and reset to LF
    - None: get it from user settings, default to 'strict'
    'eolmode' is ignored when using an external patcher program.

    Returns whether patch was applied with fuzz factor.
    """
    if files is None:
        files = set()
    # an explicitly configured external patch program takes precedence
    patcher = ui.config('ui', 'patch')
    if patcher:
        return _externalpatch(ui, repo, patcher, patchname, strip,
                              files, similarity)
    return internalpatch(ui, repo, patchname, strip, prefix, files, eolmode,
                         similarity)
2070 2091
def changedfiles(ui, repo, patchpath, strip=1):
    """Return the set of paths touched by the patch at 'patchpath'.

    Rename sources are included.  Raises util.Abort on unsupported
    parser states.
    """
    backend = fsbackend(ui, repo.root)
    changed = set()
    fp = open(patchpath, 'rb')
    try:
        for state, values in iterhunks(fp):
            if state == 'file':
                afile, bfile, first_hunk, gp = values
                if gp:
                    # git metadata carries the paths: strip them in place
                    gp.path = pathtransform(gp.path, strip - 1, '')[1]
                    if gp.oldpath:
                        gp.oldpath = pathtransform(gp.oldpath,
                                                   strip - 1, '')[1]
                else:
                    gp = makepatchmeta(backend, afile, bfile, first_hunk,
                                       strip, '')
                changed.add(gp.path)
                if gp.op == 'RENAME':
                    changed.add(gp.oldpath)
            elif state not in ('hunk', 'git'):
                raise util.Abort(_('unsupported parser state: %s') % state)
        return changed
    finally:
        fp.close()
2094 2115
class GitDiffRequired(Exception):
    """Raised while diffing to request an upgrade to git patch format.

    diff() catches this to re-run the diff with git extensions enabled
    when plain output would lose data (see its losedata callback).
    """
    pass
2097 2118
def diffallopts(ui, opts=None, untrusted=False, section='diff'):
    '''return diffopts with every optional feature group enabled and parsed'''
    return difffeatureopts(ui, opts=opts, untrusted=untrusted,
                           section=section, git=True, whitespace=True,
                           formatchanging=True)
2102 2123
2103 2124 diffopts = diffallopts
2104 2125
def difffeatureopts(ui, opts=None, untrusted=False, section='diff', git=False,
                    whitespace=False, formatchanging=False):
    '''return diffopts with only opted-in features parsed

    Features:
    - git: git-style diffs
    - whitespace: whitespace options like ignoreblanklines and ignorews
    - formatchanging: options that will likely break or cause correctness issues
      with most diff parsers
    '''
    def get(key, name=None, getter=ui.configbool, forceplain=None):
        # precedence: explicit command option, then HGPLAIN override,
        # then the configuration file
        if opts:
            v = opts.get(key)
            if v:
                return v
        if forceplain is not None and ui.plain():
            return forceplain
        return getter(section, name or key, None, untrusted=untrusted)

    # core options, expected to be understood by every diff parser
    buildopts = {
        'nodates': get('nodates'),
        'showfunc': get('show_function', 'showfunc'),
        'context': get('unified', getter=ui.config),
    }

    if git:
        buildopts['git'] = get('git')
    if whitespace:
        # (diffopt name, command/config option name) pairs
        for optname, keyname in [('ignorews', 'ignore_all_space'),
                                 ('ignorewsamount', 'ignore_space_change'),
                                 ('ignoreblanklines', 'ignore_blank_lines')]:
            buildopts[optname] = get(keyname, optname)
    if formatchanging:
        buildopts['text'] = opts and opts.get('text')
        buildopts['nobinary'] = get('nobinary')
        buildopts['noprefix'] = get('noprefix', forceplain=False)

    return mdiff.diffopts(**buildopts)
2145 2166
def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None,
         losedatafn=None, prefix='', relroot=''):
    '''yields diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.

    losedatafn(**kwarg) is a callable run when opts.upgrade=True and
    every time some change cannot be represented with the current
    patch format. Return False to upgrade to git patch format, True to
    accept the loss or raise an exception to abort the diff. It is
    called with the name of current file being diffed as 'fn'. If set
    to None, patches will always be upgraded to git format when
    necessary.

    prefix is a filename prefix that is prepended to all filenames on
    display (used for subrepos).

    relroot, if not empty, must be normalized with a trailing /. Any match
    patterns that fall outside it will be ignored.'''

    if opts is None:
        opts = mdiff.defaultopts

    if not node1 and not node2:
        node1 = repo.dirstate.p1()

    # small (20-entry) LRU cache of filelog objects keyed by filename, so
    # repeated lookups of the same file reuse its filelog
    def lrugetfilectx():
        cache = {}
        order = collections.deque()
        def getfilectx(f, ctx):
            fctx = ctx.filectx(f, filelog=cache.get(f))
            if f not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[f] = fctx.filelog()
            else:
                order.remove(f)
            order.append(f)
            return fctx
        return getfilectx
    getfilectx = lrugetfilectx()

    ctx1 = repo[node1]
    ctx2 = repo[node2]

    relfiltered = False
    if relroot != '' and match.always():
        # as a special case, create a new matcher with just the relroot
        pats = [relroot]
        match = scmutil.match(ctx2, pats, default='path')
        relfiltered = True

    if not changes:
        changes = repo.status(ctx1, ctx2, match=match)
    modified, added, removed = changes[:3]

    # nothing changed: emit no output at all
    if not modified and not added and not removed:
        return []

    if repo.ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]

    copy = {}
    if opts.git or opts.upgrade:
        copy = copies.pathcopies(ctx1, ctx2, match=match)

    if relroot is not None:
        if not relfiltered:
            # XXX this would ideally be done in the matcher, but that is
            # generally meant to 'or' patterns, not 'and' them. In this case we
            # need to 'and' all the patterns from the matcher with relroot.
            def filterrel(l):
                return [f for f in l if f.startswith(relroot)]
            modified = filterrel(modified)
            added = filterrel(added)
            removed = filterrel(removed)
            relfiltered = True
        # filter out copies where either side isn't inside the relative root
        copy = dict(((dst, src) for (dst, src) in copy.iteritems()
                     if dst.startswith(relroot)
                     and src.startswith(relroot)))

    def difffn(opts, losedata):
        return trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
                       copy, getfilectx, opts, losedata, prefix, relroot)
    if opts.upgrade and not opts.git:
        # try plain format first; upgrade to git format on data loss
        try:
            def losedata(fn):
                if not losedatafn or not losedatafn(fn=fn):
                    raise GitDiffRequired
            # Buffer the whole output until we are sure it can be generated
            return list(difffn(opts.copy(git=False), losedata))
        except GitDiffRequired:
            return difffn(opts.copy(git=True), None)
    else:
        return difffn(opts, None)
2247 2268
def difflabel(func, *args, **kw):
    '''yields 2-tuples of (output, label) based on the output of func()'''
    headprefixes = [('diff', 'diff.diffline'),
                    ('copy', 'diff.extended'),
                    ('rename', 'diff.extended'),
                    ('old', 'diff.extended'),
                    ('new', 'diff.extended'),
                    ('deleted', 'diff.extended'),
                    ('---', 'diff.file_a'),
                    ('+++', 'diff.file_b')]
    textprefixes = [('@', 'diff.hunk'),
                    ('-', 'diff.deleted'),
                    ('+', 'diff.inserted')]
    inheader = False
    for chunk in func(*args, **kw):
        lines = chunk.split('\n')
        for i, line in enumerate(lines):
            if i:
                # re-emit the newlines consumed by split()
                yield ('\n', '')
            # track whether we are inside a file header block: a hunk
            # marker ends it, any other non-diff-body line starts it
            if inheader:
                if line.startswith('@'):
                    inheader = False
            elif line and line[0] not in ' +-@\\':
                inheader = True
            stripline = line
            diffline = False
            if not inheader and line and line[0] in '+-':
                # highlight tabs and trailing whitespace, but only in
                # changed lines
                stripline = line.rstrip()
                diffline = True

            prefixes = headprefixes if inheader else textprefixes
            for prefix, label in prefixes:
                if not stripline.startswith(prefix):
                    continue
                if diffline:
                    for token in tabsplitter.findall(stripline):
                        if token[0] == '\t':
                            yield (token, 'diff.tab')
                        else:
                            yield (token, label)
                else:
                    yield (stripline, label)
                break
            else:
                yield (line, '')
            if line != stripline:
                yield (line[len(stripline):], 'diff.trailingwhitespace')
2299 2320
def diffui(*args, **kw):
    '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
    # simply route diff() output through the labelling generator
    return difflabel(diff, *args, **kw)
2303 2324
2304 2325 def _filepairs(ctx1, modified, added, removed, copy, opts):
2305 2326 '''generates tuples (f1, f2, copyop), where f1 is the name of the file
2306 2327 before and f2 is the the name after. For added files, f1 will be None,
2307 2328 and for removed files, f2 will be None. copyop may be set to None, 'copy'
2308 2329 or 'rename' (the latter two only if opts.git is set).'''
2309 2330 gone = set()
2310 2331
2311 2332 copyto = dict([(v, k) for k, v in copy.items()])
2312 2333
2313 2334 addedset, removedset = set(added), set(removed)
2314 2335 # Fix up added, since merged-in additions appear as
2315 2336 # modifications during merges
2316 2337 for f in modified:
2317 2338 if f not in ctx1:
2318 2339 addedset.add(f)
2319 2340
2320 2341 for f in sorted(modified + added + removed):
2321 2342 copyop = None
2322 2343 f1, f2 = f, f
2323 2344 if f in addedset:
2324 2345 f1 = None
2325 2346 if f in copy:
2326 2347 if opts.git:
2327 2348 f1 = copy[f]
2328 2349 if f1 in removedset and f1 not in gone:
2329 2350 copyop = 'rename'
2330 2351 gone.add(f1)
2331 2352 else:
2332 2353 copyop = 'copy'
2333 2354 elif f in removedset:
2334 2355 f2 = None
2335 2356 if opts.git:
2336 2357 # have we already reported a copy above?
2337 2358 if (f in copyto and copyto[f] in addedset
2338 2359 and copy[copyto[f]] == f):
2339 2360 continue
2340 2361 yield f1, f2, copyop
2341 2362
def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
            copy, getfilectx, opts, losedatafn, prefix, relroot):
    '''given input data, generate a diff and yield it in blocks

    If generating a diff would lose data like flags or binary data and
    losedatafn is not None, it will be called.

    relroot is removed and prefix is added to every path in the diff output.

    If relroot is not empty, this function expects every path in modified,
    added, removed and copy to start with it.'''

    def gitindex(text):
        # git blob id: sha1 of "blob <len>\0<content>"
        if not text:
            text = ""
        l = len(text)
        s = util.sha1('blob %d\0' % l)
        s.update(text)
        return s.hexdigest()

    if opts.noprefix:
        aprefix = bprefix = ''
    else:
        aprefix = 'a/'
        bprefix = 'b/'

    def diffline(f, revs):
        # plain (non-git) "diff -r REV [-r REV] file" header line
        revinfo = ' '.join(["-r %s" % rev for rev in revs])
        return 'diff %s %s' % (revinfo, f)

    date1 = util.datestr(ctx1.date())
    date2 = util.datestr(ctx2.date())

    # file flag -> git mode string
    gitmode = {'l': '120000', 'x': '100755', '': '100644'}

    # developer sanity check: every input path must live under relroot
    if relroot != '' and (repo.ui.configbool('devel', 'all')
                          or repo.ui.configbool('devel', 'check-relroot')):
        for f in modified + added + removed + copy.keys() + copy.values():
            if f is not None and not f.startswith(relroot):
                raise AssertionError(
                    "file %s doesn't start with relroot %s" % (f, relroot))

    for f1, f2, copyop in _filepairs(
        ctx1, modified, added, removed, copy, opts):
        content1 = None
        content2 = None
        flag1 = None
        flag2 = None
        if f1:
            content1 = getfilectx(f1, ctx1).data()
            if opts.git or losedatafn:
                flag1 = ctx1.flags(f1)
        if f2:
            content2 = getfilectx(f2, ctx2).data()
            if opts.git or losedatafn:
                flag2 = ctx2.flags(f2)
        binary = False
        if opts.git or losedatafn:
            binary = util.binary(content1) or util.binary(content2)

        # in plain mode, give losedatafn a chance to veto/upgrade for every
        # change a plain patch cannot represent
        if losedatafn and not opts.git:
            if (binary or
                # copy/rename
                f2 in copy or
                # empty file creation
                (not f1 and not content2) or
                # empty file deletion
                (not content1 and not f2) or
                # create with flags
                (not f1 and flag2) or
                # change flags
                (f1 and f2 and flag1 != flag2)):
                losedatafn(f2 or f1)

        path1 = f1 or f2
        path2 = f2 or f1
        # strip relroot, then prepend the display prefix
        path1 = posixpath.join(prefix, path1[len(relroot):])
        path2 = posixpath.join(prefix, path2[len(relroot):])
        header = []
        if opts.git:
            header.append('diff --git %s%s %s%s' %
                          (aprefix, path1, bprefix, path2))
            if not f1: # added
                header.append('new file mode %s' % gitmode[flag2])
            elif not f2: # removed
                header.append('deleted file mode %s' % gitmode[flag1])
            else:  # modified/copied/renamed
                mode1, mode2 = gitmode[flag1], gitmode[flag2]
                if mode1 != mode2:
                    header.append('old mode %s' % mode1)
                    header.append('new mode %s' % mode2)
                if copyop is not None:
                    header.append('%s from %s' % (copyop, path1))
                    header.append('%s to %s' % (copyop, path2))
        elif revs and not repo.ui.quiet:
            header.append(diffline(path1, revs))

        if binary and opts.git and not opts.nobinary:
            text = mdiff.b85diff(content1, content2)
            if text:
                header.append('index %s..%s' %
                              (gitindex(content1), gitindex(content2)))
        else:
            text = mdiff.unidiff(content1, date1,
                                 content2, date2,
                                 path1, path2, opts=opts)
        # suppress bare single-line headers with no hunk text
        if header and (text or len(header) > 1):
            yield '\n'.join(header) + '\n'
        if text:
            yield text
2452 2473
def diffstatsum(stats):
    """Aggregate (filename, adds, removes, isbinary) tuples.

    Returns (max filename display width, max per-file change count,
    total adds, total removes, any-binary flag).
    """
    maxname = 0
    maxchanges = 0
    addtotal = 0
    removetotal = 0
    hasbinary = False
    for filename, adds, removes, isbinary in stats:
        width = encoding.colwidth(filename)
        if width > maxname:
            maxname = width
        changes = adds + removes
        if changes > maxchanges:
            maxchanges = changes
        addtotal += adds
        removetotal += removes
        if isbinary:
            hasbinary = True

    return maxname, maxchanges, addtotal, removetotal, hasbinary
2463 2484
def diffstatdata(lines):
    """Parse diff lines into a list of (filename, adds, removes, isbinary)."""
    diffre = re.compile('^diff .*-r [a-z0-9]+\s(.*)$')

    results = []
    filename = None
    adds = removes = 0
    isbinary = False

    def flush():
        # record the file being accumulated, if any
        if filename:
            results.append((filename, adds, removes, isbinary))

    for line in lines:
        if line.startswith('diff'):
            flush()
            # reset counters for the new file in every case
            adds = removes = 0
            isbinary = False
            if line.startswith('diff --git a/'):
                filename = gitre.search(line).group(2)
            elif line.startswith('diff -r'):
                # format: "diff -r ... -r ... filename"
                filename = diffre.search(line).group(1)
        elif line.startswith('+') and not line.startswith('+++ '):
            adds += 1
        elif line.startswith('-') and not line.startswith('--- '):
            removes += 1
        elif (line.startswith('GIT binary patch') or
              line.startswith('Binary file')):
            isbinary = True
    flush()
    return results
2493 2514
def diffstat(lines, width=80, git=False):
    """Render a diffstat summary of 'lines' as a single string.

    'width' bounds the total output width.  ('git' is unused here but
    kept for interface compatibility.)
    """
    stats = diffstatdata(lines)
    maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats)

    countwidth = len(str(maxtotal))
    # leave room for the 'Bin' marker
    if hasbinary:
        countwidth = max(countwidth, 3)
    graphwidth = max(width - countwidth - maxname - 6, 10)

    def scale(i):
        if maxtotal <= graphwidth:
            return i
        # If diffstat runs out of room it doesn't print anything,
        # which isn't very useful, so always print at least one + or -
        # if there were at least some changes.
        return max(i * graphwidth // maxtotal, int(bool(i)))

    output = []
    for filename, adds, removes, isbinary in stats:
        if isbinary:
            count = 'Bin'
        else:
            count = adds + removes
        pluses = '+' * scale(adds)
        minuses = '-' * scale(removes)
        padding = ' ' * (maxname - encoding.colwidth(filename))
        output.append(' %s%s | %*s %s%s\n'
                      % (filename, padding, countwidth, count,
                         pluses, minuses))

    if stats:
        output.append(_(' %d files changed, %d insertions(+), '
                        '%d deletions(-)\n')
                      % (len(stats), totaladds, totalremoves))

    return ''.join(output)
2531 2552
def diffstatui(*args, **kw):
    '''like diffstat(), but yields 2-tuples of (output, label) for
    ui.write()
    '''
    for line in diffstat(*args, **kw).splitlines():
        if line and line[-1] in '+-':
            # per-file line: split the name column from the +/- graph
            name, graph = line.rsplit(' ', 1)
            yield (name + ' ', '')
            inserted = re.search(r'\++', graph)
            if inserted:
                yield (inserted.group(0), 'diffstat.inserted')
            deleted = re.search(r'-+', graph)
            if deleted:
                yield (deleted.group(0), 'diffstat.deleted')
        else:
            yield (line, '')
        yield ('\n', '')
General Comments 0
You need to be logged in to leave comments. Login now