##// END OF EJS Templates
record: change interface of the filtering function...
Laurent Charignon -
r24341:616c01b6 default
parent child Browse files
Show More
@@ -1,3184 +1,3180 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO, shutil
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import context, repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import crecord as crecordmod
18 18 import lock as lockmod
19 19
def parsealiases(cmd):
    """Split a command-table key into its list of alias names.

    Keys look like "^log|history"; any leading '^' markers are stripped
    and the remaining names are returned in order, primary name first.
    """
    names = cmd.lstrip("^")
    return names.split("|")
22 22
def recordfilter(ui, originalhunks):
    """Interactively select a subset of 'originalhunks' to apply.

    'originalhunks' is the already-parsed hunk list; the selection is
    delegated to patch.filterpatch.
    """
    selected = patch.filterpatch(ui, originalhunks)
    return selected
25 25
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Interactively filter working-directory changes and commit them.

    'commitfunc' performs the actual commit once the working directory
    holds only the selected changes; 'cmdsuggest' is the command name
    shown in the non-interactive abort message; when 'backupall' is true
    every changed file is backed up, otherwise only files whose content
    will be partially touched; 'filterfn' is called with (ui, parsed
    hunks) and returns the hunks the user selected.
    """
    import merge as mergemod
    if not ui.interactive():
        raise util.Abort(_('running non-interactively, use %s instead') %
                         cmdsuggest)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        merge = len(repo[None].parents()) > 1
        if merge:
            raise util.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        status = repo.status(match=match)
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        # parse the full diff once; filterfn receives hunk objects, not text
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, so we have intending-to apply subset of it
        try:
            chunks = filterfn(ui, originalchunks)
        except patch.PatchError, err:
            raise util.Abort(_('error parsing patch: %s') % err)

        # files mentioned by at least one selected hunk (headers lack
        # files(), hence the AttributeError guard)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        # added files whose hunks were edited interactively (hunk object
        # no longer present verbatim in originalchunks)
        newandmodifiedfiles = set()
        for h in chunks:
            iscrecordhunk = isinstance(h, crecordmod.uihunk)
            ishunk = isinstance(h, patch.recordhunk)
            isnew = h.filename() in status.added
            if (ishunk or iscrecordhunk) and isnew and not h in originalchunks:
                newandmodifiedfiles.add(h.filename())

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles
                        if f in modified or f in newandmodifiedfiles]

        backups = {}
        if tobackup:
            backupdir = repo.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError, err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                shutil.copystat(repo.wjoin(f), tmpname)
                backups[f] = tmpname

            # serialize only the hunks we intend to apply
            fp = cStringIO.StringIO()
            for c in chunks:
                fname = c.filename()
                if fname in backups or fname in newandmodifiedfiles:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            [os.unlink(c) for c in newandmodifiedfiles]

            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                # Equivalent to hg.revert
                choices = lambda key: key in backups
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, choices)


            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError, err:
                    raise util.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            commitfunc(ui, repo, *newfiles, **opts)

            return 0
        finally:
            # 5. finally restore backed-up files
            try:
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))
                    util.copyfile(tmpname, repo.wjoin(realname))
                    # Our calls to copystat() here and above are a
                    # hack to trick any editors that have f open that
                    # we haven't modified them.
                    #
                    # Also note that this racy as an editor could
                    # notice the file's mtime before we've finished
                    # writing it.
                    shutil.copystat(tmpname, repo.wjoin(realname))
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                pass

    # wrap ui.write so diff output can be labeled/colorized
    def wrapwrite(orig, *args, **kw):
        label = kw.pop('label', '')
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    oldwrite = ui.write
    def wrap(*args, **kwargs):
        return wrapwrite(oldwrite, *args, **kwargs)
    setattr(ui, 'write', wrap)

    try:
        return commit(ui, repo, recordfunc, pats, opts)
    finally:
        # always restore the unwrapped write, even on abort
        ui.write = oldwrite
197 193
198 194
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    # an exact hit short-circuits everything: a "log" alias beats
    # prefix-matching against "^log|history"
    if cmd in table:
        candidates = [cmd]
    else:
        candidates = table.keys()

    choice = {}
    debugchoice = {}
    allcmds = []
    for entry in candidates:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)
        matched = None
        if cmd in aliases:
            matched = cmd
        elif not strict:
            # prefix matching: the first alias starting with cmd wins
            for alias in aliases:
                if alias.startswith(cmd):
                    matched = alias
                    break
        if matched is None:
            continue
        if aliases[0].startswith("debug") or matched.startswith("debug"):
            debugchoice[matched] = (aliases, table[entry])
        else:
            choice[matched] = (aliases, table[entry])

    # only fall back to debug commands when nothing normal matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
236 232
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string.

    Raises error.AmbiguousCommand when several commands match and
    error.UnknownCommand when none do.
    """
    matches, allcmds = findpossible(cmd, table, strict)

    # exact name takes precedence over any other match
    if cmd in matches:
        return matches[cmd]

    if len(matches) > 1:
        raise error.AmbiguousCommand(cmd, sorted(matches.keys()))

    if matches:
        return matches.values()[0]

    raise error.UnknownCommand(cmd, allcmds)
253 249
def findrepo(p):
    """Walk upward from 'p' and return the first directory containing a
    '.hg' subdirectory, or None when the filesystem root is reached."""
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # dirname() is a fixed point only at the root: no repo found
            return None
        p = parent

    return p
261 257
def bailifchanged(repo):
    """Abort when the working directory is not clean.

    Checks, in order: an uncommitted merge, any modified/added/removed/
    deleted files, and dirty subrepos.
    """
    if repo.dirstate.p2() != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    # first four status fields: modified, added, removed, deleted
    if any(repo.status()[:4]):
        raise util.Abort(_('uncommitted changes'))
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        if wctx.sub(subpath).dirty():
            raise util.Abort(_("uncommitted changes in subrepo %s") % subpath)
272 268
def logmessage(ui, opts):
    """ get the log message according to -m and -l option """
    message = opts.get('message')
    logfile = opts.get('logfile')

    # -m and -l would give two competing message sources: refuse both
    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                # '-' means read the message from stdin
                message = ui.fin.read()
            else:
                # splitlines/join normalizes line endings to '\n'
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message
291 287
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # a context with more than one parent is a merge
        ismerge = len(ctxorbool.parents()) > 1

    return baseformname + (".merge" if ismerge else ".normal")
308 304
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is a extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        def forcededitor(r, c, s):
            return commitforceeditor(r, c, s, finishdesc=finishdesc,
                                     extramsg=extramsg, editform=editform)
        return forcededitor
    if editform:
        def formeditor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return formeditor
    return commiteditor
339 335
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    raw = opts.get('limit')
    if not raw:
        # missing/empty/zero limit means unlimited
        return None
    try:
        limit = int(raw)
    except ValueError:
        raise util.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise util.Abort(_('limit must be positive'))
    return limit
353 349
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """expand %-escapes in 'pat' to build an output filename

    Supported escapes (when the corresponding argument is given):
    %H/%h/%R/%r node hashes and revision numbers, %m sanitized desc,
    %b repo basename, %N total, %n zero-padded seqno, %s/%d/%p pathname
    parts, %% a literal percent. An unknown escape aborts.
    """
    # expansions only meaningful when a node is given
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        'm': lambda: re.sub('[^\w]', '_', str(desc))
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node:
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # pad the sequence number to the width of the total count
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        # scan the pattern, replacing each %X escape via the expander table
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort(_("invalid format spec '%%%s' in output filename") %
                         inst.args[0])
399 395
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """return a file object for output/input selected by 'pat'

    '-' or an empty pat selects the ui's stdout/stdin; a pat that is
    already a file-like object is returned as-is; otherwise pat is
    expanded via makefilename and the resulting path opened.
    """
    writable = mode not in ('r', 'rb')

    if not pat or pat == '-':
        if writable:
            fp = repo.ui.fout
        else:
            fp = repo.ui.fin
        if util.safehasattr(fp, 'fileno'):
            # dup the descriptor so the caller may close it independently
            return os.fdopen(os.dup(fp.fileno()), mode)
        else:
            # if this fp can't be duped properly, return
            # a dummy object that can be closed
            class wrappedfileobj(object):
                noop = lambda x: None
                def __init__(self, f):
                    self.f = f
                def __getattr__(self, attr):
                    if attr == 'close':
                        # swallow close() so the real stream stays open
                        return self.noop
                    else:
                        return getattr(self.f, attr)

            return wrappedfileobj(fp)
    if util.safehasattr(pat, 'write') and writable:
        return pat
    if util.safehasattr(pat, 'read') and 'r' in mode:
        return pat
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        # modemap makes the first write to a name truncate ('wb') and
        # subsequent writes append ('ab')
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            modemap[fn] = 'ab'
    return open(fn, mode)
437 433
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']

    # validate the flag/argument combination before touching anything
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl or mf:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest '
                    'without a repository')
    if msg:
        raise util.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif mf:
            r = repo.manifest
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if r:
        return r

    # no revlog resolved via the repo: open the revlog file directly
    if not file_:
        raise error.CommandError(cmd, _('invalid arguments'))
    if not os.path.isfile(file_):
        raise util.Abort(_("revlog '%s' not found") % file_)
    return revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
                         file_[:-2] + ".i")
472 468
def copy(ui, repo, pats, opts, rename=False):
    """copy (or, with rename=True, move) pats[:-1] to pats[-1]

    Returns True when at least one copy failed, False otherwise.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # expand 'pat' into (abs, rel, exact) source triples, warning
        # about unmanaged or removed files that were named exactly
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(repo[None], [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # perform one copy/rename; returns True to report a failure
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                # case-only rename on a case-insensitive filesystem
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # go through a temp name so the rename is visible to
                    # case-insensitive filesystems
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                util.unlinkpath(repo.wjoin(abssrc))
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many stripped sources already exist under
                    # dest; the higher-scoring strip length wins
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res


    pats = scmutil.expandpats(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise util.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise util.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
701 697
def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
            runargs=None, appendpid=False):
    '''Run a command as a service.'''

    def writepid(pid):
        # record the service pid when --pid-file was given
        if opts['pid_file']:
            if appendpid:
                mode = 'a'
            else:
                mode = 'w'
            fp = open(opts['pid_file'], mode)
            fp.write(str(pid) + '\n')
            fp.close()

    if opts['daemon'] and not opts['daemon_pipefds']:
        # parent side of daemonization: re-exec ourselves detached and
        # wait for the child to signal readiness.
        # Signal child process startup with file removal
        lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
        os.close(lockfd)
        try:
            if not runargs:
                runargs = util.hgcmd() + sys.argv[1:]
            runargs.append('--daemon-pipefds=%s' % lockpath)
            # Don't pass --cwd to the child process, because we've already
            # changed directory.
            for i in xrange(1, len(runargs)):
                if runargs[i].startswith('--cwd='):
                    del runargs[i]
                    break
                elif runargs[i].startswith('--cwd'):
                    del runargs[i:i + 2]
                    break
            def condfn():
                return not os.path.exists(lockpath)
            pid = util.rundetached(runargs, condfn)
            if pid < 0:
                raise util.Abort(_('child process failed to start'))
            writepid(pid)
        finally:
            try:
                os.unlink(lockpath)
            except OSError, e:
                if e.errno != errno.ENOENT:
                    raise
        if parentfn:
            return parentfn(pid)
        else:
            return

    if initfn:
        initfn()

    if not opts['daemon']:
        writepid(os.getpid())

    if opts['daemon_pipefds']:
        # child side of daemonization: detach from the terminal and
        # signal the parent by unlinking the lock file
        lockpath = opts['daemon_pipefds']
        try:
            os.setsid()
        except AttributeError:
            pass
        os.unlink(lockpath)
        util.hidewindow()
        sys.stdout.flush()
        sys.stderr.flush()

        # redirect stdin to /dev/null and stdout/stderr to the logfile
        # (or /dev/null when no logfile was given)
        nullfd = os.open(os.devnull, os.O_RDWR)
        logfilefd = nullfd
        if logfile:
            logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
        os.dup2(nullfd, 0)
        os.dup2(logfilefd, 1)
        os.dup2(logfilefd, 2)
        if nullfd not in (0, 1, 2):
            os.close(nullfd)
        if logfile and logfilefd not in (0, 1, 2):
            os.close(logfilefd)

    if runfn:
        return runfn()
781 777
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it a simple code translation from
    the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)

    Returns a (message, node, rejects) tuple.
    """
    tmpname, message, user, date, branch, nodeid, p1, p2 = \
        patch.extract(ui, hunk)

    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)
    if not tmpname:
        # patch.extract found nothing to apply
        return (None, None, False)
    msg = _('applied to working directory')

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            if not nodeid or not p1:
                raise util.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            # apply the patch to the working directory
            repo.dirstate.beginparentchange()
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or opts.get('import_branch'):
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except patch.PatchError, e:
                if not partial:
                    raise util.Abort(str(e))
                if partial:
                    # with --partial, record the rejects and keep going
                    rejects = True

            files = list(files)
            if opts.get('no_commit'):
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform, **opts)
                n = repo.commit(message, opts.get('user') or user,
                                opts.get('date') or date, match=m,
                                editor=editor, force=partial)
            repo.dirstate.endparentchange()
        else:
            # --bypass: apply directly into the store via a memory context
            if opts.get('exact') or opts.get('import_branch'):
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except patch.PatchError, e:
                    raise util.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.makememctx(repo, (p1.node(), p2.node()),
                                            message,
                                            opts.get('user') or user,
                                            opts.get('date') or date,
                                            branch, files, store,
                                            editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and opts.get('no_commit'):
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise util.Abort(_('patch is damaged or loses information'))
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        # always remove the temp file patch.extract created
        os.unlink(tmpname)
931 927
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.'''

    total = len(revs)
    revwidth = max([len(str(rev)) for rev in revs])
    # shared across single() calls so repeated writes to one expanded
    # filename append instead of truncating
    filemode = {}

    def single(rev, seqno, fp):
        # write one changeset (header + diff) to fp, opening a file from
        # 'template' when no fp was supplied
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()

        if parents:
            prev = parents[0]
        else:
            prev = nullid

        shouldclose = False
        if not fp and len(template) > 0:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fp = makefileobj(repo, template, node, desc=desc, total=total,
                             seqno=seqno, revwidth=revwidth, mode='wb',
                             modemap=filemode)
            if fp != template:
                shouldclose = True
        if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        if not fp:
            write = repo.ui.write
        else:
            def write(s, **kw):
                fp.write(s)


        write("# HG changeset patch\n")
        write("# User %s\n" % ctx.user())
        write("# Date %d %d\n" % ctx.date())
        write("# %s\n" % util.datestr(ctx.date()))
        if branch and branch != 'default':
            write("# Branch %s\n" % branch)
        write("# Node ID %s\n" % hex(node))
        write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            write("# Parent %s\n" % hex(parents[1]))
        write(ctx.description().rstrip())
        write("\n\n")

        for chunk, label in patch.diffui(repo, prev, node, opts=opts):
            write(chunk, label=label)

        if shouldclose:
            fp.close()

    for seqno, rev in enumerate(revs):
        # seqno is 1-based for %n expansion in the template
        single(rev, seqno + 1, fp)
993 989
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   listsubrepos=False):
    '''show diff or diffstat.

    Writes the diff (or, with stat=True, a diffstat summary) between
    node1 and node2 for files selected by match. Output goes through
    ui.write unless fp is given, in which case labels are dropped and
    raw chunks are written to fp. With listsubrepos, recurses into
    subrepos present in either context.
    '''
    if fp is None:
        write = ui.write
    else:
        # fp has no label support; discard the keyword arguments
        def write(s, **kw):
            fp.write(s)

    if stat:
        # diffstat only needs changed-line counts, not context lines
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width,
                                             git=diffopts.git):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.narrowmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1036 1032
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        # when True, show() buffers output per revision for later flush()
        self.buffered = buffered
        # match object selecting files for patch/diffstat output, or None
        self.matchfn = matchfn
        self.diffopts = diffopts
        # rev -> buffered header text (used by templated subclasses)
        self.header = {}
        # rev -> buffered changeset text
        self.hunk = {}
        self.lastheader = None
        self.footer = None

    def flush(self, rev):
        """Write any buffered header/hunk for rev.

        Returns 1 if a buffered hunk was written, 0 otherwise.
        """
        if rev in self.header:
            h = self.header[rev]
            # only emit a header when it differs from the previous one
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        # emit the accumulated footer, if a subclass produced one
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        """Display ctx, buffering the output per-rev when buffered."""
        if self.buffered:
            self.ui.pushbuffer()
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()

        if self.ui.quiet:
            # quiet mode: just "rev:shortnode"
            self.ui.write("%d:%s\n" % (rev, short(changenode)),
                          label='log.node')
            return

        log = self.repo.changelog
        date = util.datestr(ctx.date())

        # full hashes in debug mode, short ones otherwise
        if self.ui.debugflag:
            hexfunc = hex
        else:
            hexfunc = short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        # i18n: column positioning for "hg log"
        self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)),
                      label='log.changeset changeset.%s' % ctx.phasestr())

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            # i18n: column positioning for "hg log"
            self.ui.write(_("branch: %s\n") % branch,
                          label='log.branch')

        for name, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if name == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            # i18n: column positioning for "hg log"
            self.ui.write(_("phase: %s\n") % _(ctx.phasestr()),
                          label='log.phase')
        for parent in parents:
            label = 'log.parent changeset.%s' % self.repo[parent[0]].phasestr()
            # i18n: column positioning for "hg log"
            self.ui.write(_("parent: %d:%s\n") % parent,
                          label=label)

        if self.ui.debugflag:
            mnode = ctx.manifestnode()
            # i18n: column positioning for "hg log"
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifest.rev(mnode), hex(mnode)),
                          label='ui.debug log.manifest')
        # i18n: column positioning for "hg log"
        self.ui.write(_("user: %s\n") % ctx.user(),
                      label='log.user')
        # i18n: column positioning for "hg log"
        self.ui.write(_("date: %s\n") % date,
                      label='log.date')

        if self.ui.debugflag:
            # debug mode: list modified/added/removed files separately
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([# i18n: column positioning for "hg log"
                                   _("files:"),
                                   # i18n: column positioning for "hg log"
                                   _("files+:"),
                                   # i18n: column positioning for "hg log"
                                   _("files-:")], files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            # i18n: column positioning for "hg log"
            self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            # i18n: column positioning for "hg log"
            self.ui.write(_("copies: %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                # i18n: column positioning for "hg log"
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # non-verbose: only the first line of the description
                # i18n: column positioning for "hg log"
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(changenode, matchfn)

    def showpatch(self, node, matchfn):
        """Write diffstat and/or patch for node when --stat/--patch set."""
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            prev = self.repo.changelog.parents(node)[0]
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                if stat:
                    # blank line separating diffstat from the diff itself
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = log.parentrevs(rev)
        if not self.ui.debugflag and parents[1] == nullrev:
            if parents[0] >= rev - 1:
                parents = []
            else:
                parents = [parents[0]]
        return parents
1221 1217
class jsonchangeset(changeset_printer):
    '''format changeset information.

    Emits one hand-built JSON object per changeset inside a JSON array;
    string fields are escaped with encoding.jsonescape. close() must be
    called to terminate the array.
    '''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        # tracks whether any changeset has been written yet, so that
        # array delimiters are placed correctly
        self._first = True

    def close(self):
        # terminate the JSON array; "[]" when nothing was shown
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        hexnode = hex(ctx.node())
        rev = ctx.rev()
        j = encoding.jsonescape

        # open the array on the first object, otherwise separate with ','
        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            self.ui.write(",\n {")

        if self.ui.quiet:
            # quiet mode: only rev and node
            self.ui.write('\n "rev": %d' % rev)
            self.ui.write(',\n "node": "%s"' % hexnode)
            self.ui.write('\n }')
            return

        self.ui.write('\n "rev": %d' % rev)
        self.ui.write(',\n "node": "%s"' % hexnode)
        self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
        self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
        self.ui.write(',\n "user": "%s"' % j(ctx.user()))
        self.ui.write(',\n "date": [%d, %d]' % ctx.date())
        self.ui.write(',\n "desc": "%s"' % j(ctx.description()))

        self.ui.write(',\n "bookmarks": [%s]' %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write(',\n "tags": [%s]' %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write(',\n "parents": [%s]' %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            # debug mode adds manifest, extras and per-category file lists
            self.ui.write(',\n "manifest": "%s"' % hex(ctx.manifestnode()))

            self.ui.write(',\n "extra": {%s}' %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            files = ctx.p1().status(ctx)
            self.ui.write(',\n "modified": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write(',\n "added": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write(',\n "removed": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            self.ui.write(',\n "files": [%s]' %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                self.ui.write(',\n "copies": {%s}' %
                              ", ".join('"%s": "%s"' % (j(k), j(v))
                                        for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            # capture diffstat/diff output in a buffer so it can be
            # embedded as an escaped JSON string
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))

        self.ui.write("\n }")
1311 1307
class changeset_templater(changeset_printer):
    '''format changeset information.

    Renders changesets through the templater, honoring per-mode template
    variants (verbose/quiet/debug) for the header, changeset and footer.
    '''

    def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        # full node hash in debug mode, 12-char prefix otherwise
        formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
        defaulttempl = {
            'parent': '{rev}:{node|formatnode} ',
            'manifest': '{rev}:{node|formatnode}',
            'file_copy': '{name} ({source})',
            'extra': '{key}={value|stringescape}'
            }
        # filecopy is preserved for compatibility reasons
        defaulttempl['filecopy'] = defaulttempl['file_copy']
        self.t = templater.templater(mapfile, {'formatnode': formatnode},
                                     cache=defaulttempl)
        if tmpl:
            # explicit template string overrides the mapfile's 'changeset'
            self.t.cache['changeset'] = tmpl

        self.cache = {}

    def _meaningful_parentrevs(self, ctx):
        """Return list of meaningful (or all if debug) parentrevs for rev.
        """
        parents = ctx.parents()
        if len(parents) > 1:
            return parents
        if self.ui.debugflag:
            return [parents[0], self.repo['null']]
        # hide the parent when it is simply the preceding revision
        if parents[0].rev() >= ctx.rev() - 1:
            return []
        return parents

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''

        showlist = templatekw.showlist

        # showparents() behaviour depends on ui trace level which
        # causes unexpected behaviours at templating level and makes
        # it harder to extract it in a standalone function. Its
        # behaviour cannot be changed so leave it here for now.
        def showparents(**args):
            ctx = args['ctx']
            parents = [[('rev', p.rev()),
                        ('node', p.hex()),
                        ('phase', p.phasestr())]
                       for p in self._meaningful_parentrevs(ctx)]
            return showlist('parent', parents, **args)

        props = props.copy()
        props.update(templatekw.keywords)
        props['parents'] = showparents
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache

        # find correct templates for current mode
        # later entries win, so debug beats quiet beats verbose beats base
        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        types = {'header': '', 'footer':'', 'changeset': 'changeset'}
        for mode, postfix in tmplmodes:
            for type in types:
                cur = postfix and ('%s_%s' % (type, postfix)) or type
                if mode and cur in self.t:
                    types[type] = cur

        try:

            # write header
            if types['header']:
                h = templater.stringify(self.t(types['header'], **props))
                if self.buffered:
                    self.header[ctx.rev()] = h
                else:
                    # suppress repeated identical headers
                    if self.lastheader != h:
                        self.lastheader = h
                        self.ui.write(h)

            # write changeset metadata, then patch if requested
            key = types['changeset']
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(ctx.node(), matchfn)

            if types['footer']:
                # the footer is rendered once and emitted by close()
                if not self.footer:
                    self.footer = templater.stringify(self.t(types['footer'],
                                                      **props))

        except KeyError, inst:
            msg = _("%s: no key named '%s'")
            raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
        except SyntaxError, inst:
            raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1414 1410
def gettemplate(ui, tmpl, style):
    """
    Find the template matching the given template spec or style.

    Returns a (tmpl, mapfile) pair, at most one of which is non-None:
    either a literal template string, or the path of a style map file.
    Falls back through [ui] logtemplate/style config, stock styles,
    the [templates] config section, and finally file paths.
    """

    # ui settings
    if not tmpl and not style: # template are stronger than style
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            try:
                tmpl = templater.parsestring(tmpl)
            except SyntaxError:
                # not a quoted string: parse it as-is
                tmpl = templater.parsestring(tmpl, quoted=False)
            return tmpl, None
        else:
            style = util.expandpath(ui.config('ui', 'style', ''))

    if not tmpl and style:
        mapfile = style
        # bare name: look it up among the stock styles
        if not os.path.split(mapfile)[0]:
            mapname = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if mapname:
                mapfile = mapname
        return None, mapfile

    if not tmpl:
        return None, None

    # looks like a literal template?
    if '{' in tmpl:
        return tmpl, None

    # perhaps a stock style?
    if not os.path.split(tmpl)[0]:
        mapname = (templater.templatepath('map-cmdline.' + tmpl)
                   or templater.templatepath(tmpl))
        if mapname and os.path.isfile(mapname):
            return None, mapname

    # perhaps it's a reference to [templates]
    t = ui.config('templates', tmpl)
    if t:
        try:
            tmpl = templater.parsestring(t)
        except SyntaxError:
            tmpl = templater.parsestring(t, quoted=False)
        return tmpl, None

    if tmpl == 'list':
        ui.write(_("available styles: %s\n") % templater.stylelist())
        raise util.Abort(_("specify a template"))

    # perhaps it's a path to a map or a template
    if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
        # is it a mapfile for a style?
        if os.path.basename(tmpl).startswith("map-"):
            return None, os.path.realpath(tmpl)
        tmpl = open(tmpl).read()
        return tmpl, None

    # constant string?
    return tmpl, None
1478 1474
1479 1475 def show_changeset(ui, repo, opts, buffered=False):
1480 1476 """show one changeset using template or regular display.
1481 1477
1482 1478 Display format will be the first non-empty hit of:
1483 1479 1. option 'template'
1484 1480 2. option 'style'
1485 1481 3. [ui] setting 'logtemplate'
1486 1482 4. [ui] setting 'style'
1487 1483 If all of these values are either the unset or the empty string,
1488 1484 regular display via changeset_printer() is done.
1489 1485 """
1490 1486 # options
1491 1487 matchfn = None
1492 1488 if opts.get('patch') or opts.get('stat'):
1493 1489 matchfn = scmutil.matchall(repo)
1494 1490
1495 1491 if opts.get('template') == 'json':
1496 1492 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1497 1493
1498 1494 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1499 1495
1500 1496 if not tmpl and not mapfile:
1501 1497 return changeset_printer(ui, repo, matchfn, opts, buffered)
1502 1498
1503 1499 try:
1504 1500 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1505 1501 buffered)
1506 1502 except SyntaxError, inst:
1507 1503 raise util.Abort(inst.args[0])
1508 1504 return t
1509 1505
def showmarker(ui, marker):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    ui.write(hex(marker.precnode()))
    # each successor is preceded by a single space
    for succ in marker.succnodes():
        ui.write(' ' + hex(succ))
    ui.write(' %X ' % marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
    ui.write('(%s) ' % util.datestr(marker.date()))
    # metadata entries, sorted by key; the date key is shown separately above
    entries = ['%r: %r' % pair
               for pair in sorted(marker.metadata().items())
               if pair[0] != 'date']
    ui.write('{%s}' % ', '.join(entries))
    ui.write('\n')
1527 1523
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    datematch = util.matchdate(date)
    allfiles = scmutil.matchall(repo)
    found = {}

    def prep(ctx, fns):
        # remember every walked revision whose commit time matches the spec
        when = ctx.date()
        if datematch(when[0]):
            found[ctx.rev()] = when

    # walkchangerevs yields tipmost-first, so the first hit wins
    for ctx in walkchangerevs(repo, allfiles, {'rev': None}, prep):
        rev = ctx.rev()
        if rev not in found:
            continue
        ui.status(_("found revision %s from %s\n") %
                  (rev, util.datestr(found[rev])))
        return str(rev)

    raise util.Abort(_("revision matching date not found"))
1548 1544
def increasingwindows(windowsize=8, sizelimit=512):
    """Generate window sizes that double until sizelimit, then repeat it."""
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size = size * 2
1554 1550
class FileWalkError(Exception):
    """Raised by walkfilerevs() when the file history cannot be walked
    using filelogs alone; walkchangerevs() catches it and falls back to
    the slow changelog-scanning path."""
    pass
1557 1553
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.

    fncache is filled in place: it maps each returned rev to the list of
    matched filenames changed there. With follow, copy/rename sources of
    the matched files are walked as well.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yield (filename, filenode-or-None) for explicit files, then for
        # any copy sources discovered while walking (appended to 'copies')
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise util.Abort(_('cannot follow file not in parent '
                                       'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise util.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)


        # keep track of all ancestors of the file
        ancestors = set([filelog.linkrev(last)])

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
            # XXX insert 1327 fix here
            if flparentlinkrevs:
                ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
1655 1651
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or (match.files() and opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if not slowpath and not match.files():
        # No files, no patterns. Display all revs.
        wanted = revs

    if not slowpath and match.files():
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    class followfilter(object):
        def __init__(self, onlyfirst=False):
            self.startrev = nullrev
            self.roots = set()
            self.onlyfirst = onlyfirst

        def match(self, rev):
            def realparents(rev):
                if self.onlyfirst:
                    return repo.changelog.parentrevs(rev)[0:1]
                else:
                    return filter(lambda x: x != nullrev,
                                  repo.changelog.parentrevs(rev))

            if self.startrev == nullrev:
                self.startrev = rev
                return True

            if rev > self.startrev:
                # forward: all descendants
                if not self.roots:
                    self.roots.add(self.startrev)
                for parent in realparents(rev):
                    if parent in self.roots:
                        self.roots.add(rev)
                        return True
            else:
                # backwards: all parents
                if not self.roots:
                    self.roots.update(realparents(self.startrev))
                if rev in self.roots:
                    self.roots.remove(rev)
                    self.roots.update(realparents(rev))
                    return True

            return False

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = followfilter()
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                # 'wanted' may be a plain set (filelog fast path with file
                # patterns), a lazywantedset (slow path), or a smartset
                # (all-revs case). Plain sets and lazywantedset do not
                # support '- list' (TypeError), so prefer their discard()
                # method; smartsets support '-' but have no discard().
                if util.safehasattr(wanted, 'discard'):
                    wanted.discard(x)
                else:
                    wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and not match.files():
            ff = followfilter(onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                try:
                    rev = it.next()
                    if want(rev):
                        nrevs.append(rev)
                except (StopIteration):
                    stopiteration = True
                    break
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    # fncache miss: lazily compute the matched files
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
1831 1827
def _makefollowlogfilematcher(repo, files, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "linkrevfiles". It is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating linkrevs to file names (which is not "correct" but
    # good enough).
    pctx = repo['.']
    linkrevfiles = {}
    # one-element list used as a mutable "already populated" flag
    populated = []

    def _fill():
        for fn in files:
            fctx = pctx[fn]
            for batch in ((fctx,), fctx.ancestors(followfirst=followfirst)):
                for c in batch:
                    linkrevfiles.setdefault(c.linkrev(), set()).add(c.path())

    def filematcher(rev):
        # populate lazily, on the first call only
        if not populated:
            populated.append(True)
            _fill()
        return scmutil.matchfiles(repo, linkrevfiles.get(rev, []))

    return filematcher
1858 1854
1859 1855 def _makenofollowlogfilematcher(repo, pats, opts):
1860 1856 '''hook for extensions to override the filematcher for non-follow cases'''
1861 1857 return None
1862 1858
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # map of log option -> (revset template, join operator for list values)
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    # copy so the synthetic '_...' keys added below don't leak to the caller
    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behaviour depends on revs...
    # callers guarantee revs is non-empty, so this next() cannot raise
    it = iter(revs)
    startrev = it.next()
    try:
        followdescendants = startrev < it.next()
    except (StopIteration):
        followdescendants = False

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    pctx = repo[None]
    match, pats = scmutil.matchandpats(pctx, pats, opts)
    slowpath = match.anypats() or (match.files() and opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in pctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise util.Abort(_('cannot follow file not in parent '
                                       'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise util.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

    # We decided to fall back to the slowpath because at least one
    # of the paths was not a file. Check to see if at least one of them
    # existed in history - in that case, we'll continue down the
    # slowpath; otherwise, we can turn off the slowpath
    if slowpath:
        for path in match.files():
            if path == '.' or path in repo.store:
                break
        else:
            slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
        if filematcher is None:
            # default: every revision uses the same static matcher
            filematcher = lambda rev: match

    # translate the collected options into one revset expression
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2016 2012
def _logrevs(repo, opts):
    """Return the initial set of revisions to be filtered by `hg log` options.

    The result is in the order revisions should be displayed (newest first
    unless --rev specified an explicit order).
    """
    # Default --rev value depends on --follow but --follow behaviour
    # depends on revisions resolved from --rev...
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts['rev'])
    elif follow and repo.dirstate.p1() == nullid:
        # --follow from an unborn working directory parent: nothing to show
        revs = revset.baseset()
    elif follow:
        revs = repo.revs('reverse(:.)')
    else:
        revs = revset.spanset(repo)
        revs.reverse()
    return revs
2031 2027
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        revs.sort(reverse=True)
    if expr:
        # Revset matchers often operate faster on revisions in changelog
        # order, because most filters deal with the changelog.
        revs.reverse()
        matcher = revset.match(repo.ui, expr)
        # Revset matches can reorder revisions. "A or B" typically
        # returns the revision matching A then the revision matching B. Sort
        # again to fix that.
        revs = matcher(repo, revs)
        revs.sort(reverse=True)
    if limit is not None:
        # honor --limit by truncating to the first 'limit' revisions
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = revset.baseset(limitedrevs)

    return revs, expr, filematcher
2068 2064
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        # Revset matchers often operate faster on revisions in changelog
        # order, because most filters deal with the changelog.
        # (with --rev, the user-specified order must be preserved)
        if not opts.get('rev'):
            revs.reverse()
        matcher = revset.match(repo.ui, expr)
        # Revset matches can reorder revisions. "A or B" typically
        # returns the revision matching A then the revision matching B. Sort
        # again to fix that.
        revs = matcher(repo, revs)
        if not opts.get('rev'):
            revs.sort(reverse=True)
    if limit is not None:
        # honor --limit by consuming at most 'limit' revisions lazily
        count = 0
        limitedrevs = []
        it = iter(revs)
        while count < limit:
            try:
                limitedrevs.append(it.next())
            except (StopIteration):
                break
            count += 1
        revs = revset.baseset(limitedrevs)

    return revs, expr, filematcher
2107 2103
def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
                 filematcher=None):
    """Render a DAG of changesets as an ascii graph through 'displayer'.

    'dag' yields (rev, type, ctx, parents) tuples; 'edgefn' (usually
    graphmod.asciiedges) turns each node into drawable edge data.
    'getrenamed', if given, is used to annotate copies; 'filematcher',
    if given, narrows the files detailed per revision.
    """
    # fix: the graph-node type was previously bound to a local named
    # 'type', shadowing the builtin; renamed to 'gtype'
    seen, state = [], graphmod.asciistate()
    for rev, gtype, ctx, parents in dag:
        # pick the node glyph: '@' working parent, 'x' obsolete,
        # '_' closes branch, 'o' otherwise
        char = 'o'
        if ctx.node() in showparents:
            char = '@'
        elif ctx.obsolete():
            char = 'x'
        elif ctx.closesbranch():
            char = '_'
        copies = None
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(rev)
        edges = edgefn(gtype, char, lines, seen, rev, parents)
        for gtype, char, lines, coldata in edges:
            graphmod.ascii(ui, state, gtype, char, lines, coldata)
    displayer.close()
2138 2134
def graphlog(ui, repo, *pats, **opts):
    """Run `hg log -G`: resolve revisions, then draw them as a graph."""
    # Parameters are identical to log command ones
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            # only consider renames up to the newest requested revision
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
    displayer = show_changeset(ui, repo, opts, buffered=True)
    showparents = [ctx.node() for ctx in repo[None].parents()]
    displaygraph(ui, revdag, displayer, showparents,
                 graphmod.asciiedges, getrenamed, filematcher)
2154 2150
def checkunsupportedgraphflags(pats, opts):
    """Abort if an option incompatible with -G/--graph was supplied."""
    unsupported = ["newest_first"]
    for flag in unsupported:
        if opts.get(flag):
            raise util.Abort(_("-G/--graph option is incompatible with --%s")
                             % flag.replace("_", "-"))
2160 2156
def graphrevs(repo, nodes, opts):
    """Build a graphmod DAG from 'nodes', newest first, honoring --limit.

    Note: reverses 'nodes' in place before (possibly) truncating it.
    """
    cap = loglimit(opts)
    nodes.reverse()
    if cap is not None:
        nodes = nodes[:cap]
    return graphmod.nodes(repo, nodes)
2167 2163
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule files matching 'match' for addition (hg add).

    Recurses into subrepos. Returns the list of paths that could not be
    added. 'explicitonly' restricts additions to exactly-named files.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # wrap match.bad so failed lookups are collected into 'bad' while
    # still invoking the original callback
    oldbad = match.bad
    match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        # audit names for case collisions on case-insensitive filesystems
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
    for f in wctx.walk(match):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, match)
            if opts.get('subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                # without --subrepos, only add files named explicitly
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
2204 2200
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking files matching 'match' without deleting them (hg forget).

    Recurses into subrepos. Returns (bad, forgot): paths that could not be
    forgotten and paths that were.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # wrap match.bad so failed lookups are collected into 'bad' while
    # still invoking the original callback
    oldbad = match.bad
    match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
    wctx = repo[None]
    forgot = []
    s = repo.status(match=match, clean=True)
    # modified + added + deleted + clean (note: this local shadows the
    # function name, which is fine since the function isn't re-entered)
    forget = sorted(s[0] + s[1] + s[3] + s[6])
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # warn about explicitly named files that are not tracked
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                        bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2246 2242
def files(ui, ctx, m, fm, fmt):
    """List the files in 'ctx' matching 'm' through formatter 'fm'.

    Returns 0 if at least one file was listed, 1 otherwise.
    """
    ret = 1  # flips to 0 once anything is emitted
    rev = ctx.rev()
    dirstate = ctx.repo().dirstate

    for path in ctx.matches(m):
        # in the working directory, skip files marked for removal
        if rev is None and dirstate[path] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fctx = ctx[path]
            fm.write('size flags', '% 10d % 1s ', fctx.size(), fctx.flags())
        fm.data(abspath=path)
        fm.write('path', fmt, m.rel(path))
        ret = 0

    return ret
2264 2260
def remove(ui, repo, m, prefix, after, force, subrepos):
    """Schedule files matching 'm' for removal (hg remove/rm).

    'after' records deletions already made on disk; 'force' removes even
    modified/added files; 'subrepos' recurses into all subrepositories.
    Returns 0 on success, 1 if any warning was issued.
    """
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    # fix: hoisted out of the subpath loop (it is loop-invariant and was
    # being redefined on every iteration)
    def matchessubrepo(matcher, subpath):
        # a subrepo is affected if named exactly or if a matched file
        # lives underneath it
        if matcher.exact(subpath):
            return True
        for f in matcher.files():
            if f.startswith(subpath):
                return True
        return False

    for subpath in sorted(wctx.substate):
        if subrepos or matchessubrepo(m, subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                if sub.removefiles(submatch, prefix, after, force, subrepos):
                    ret = 1
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % join(subpath))

    # warn about failure to delete explicit files/dirs
    deleteddirs = scmutil.dirs(deleted)
    for f in m.files():
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath):
                    return True
            return False

        isdir = f in deleteddirs or f in wctx.dirs()
        if f in repo.dirstate or isdir or f == '.' or insubrepo():
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                ui.warn(_('not removing %s: no tracked files\n')
                        % m.rel(f))
            else:
                ui.warn(_('not removing %s: file is untracked\n')
                        % m.rel(f))
            # missing files will generate a warning elsewhere
            ret = 1

    # fix: renamed local from 'list' (shadowed the builtin) to 'toremove'
    if force:
        toremove = modified + deleted + clean + added
    elif after:
        toremove = deleted
        for f in modified + added + clean:
            ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
            ret = 1
    else:
        toremove = deleted + clean
        for f in modified:
            ui.warn(_('not removing %s: file is modified (use -f'
                      ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            ui.warn(_('not removing %s: file has been marked for add'
                      ' (use forget to undo)\n') % m.rel(f))
            ret = 1

    for f in sorted(toremove):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    wlock = repo.wlock()
    try:
        if not after:
            for f in toremove:
                if f in added:
                    continue # we never unlink added files on remove
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        repo[None].forget(toremove)
    finally:
        wlock.release()

    return ret
2349 2345
def cat(ui, repo, ctx, matcher, prefix, **opts):
    """Write the contents of files in 'ctx' matching 'matcher' (hg cat).

    Recurses into subrepos. Returns 0 if at least one file was written,
    1 otherwise.
    """
    err = 1

    def write(path):
        fp = makefileobj(repo, opts.get('output'), ctx.node(),
                         pathname=os.path.join(prefix, path))
        data = ctx[path].data()
        if opts.get('decode'):
            data = repo.wwritedata(path, data)
        fp.write(data)
        fp.close()

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    # fix: renamed local from 'file' (shadowed the builtin) to 'filename'
    if len(matcher.files()) == 1 and not matcher.anypats():
        filename = matcher.files()[0]
        mf = repo.manifest
        mfnode = ctx._changeset[0]
        if mf.find(mfnode, filename)[0]:
            write(filename)
            return 0

    # Don't warn about "missing" files that are really in subrepos
    bad = matcher.bad

    def badfn(path, msg):
        for subpath in ctx.substate:
            if path.startswith(subpath):
                return
        bad(path, msg)

    matcher.bad = badfn

    # fix: renamed loop variable from 'abs' (shadowed the builtin)
    for abspath in ctx.walk(matcher):
        write(abspath)
        err = 0

    # restore the original callback before recursing into subrepos
    matcher.bad = bad

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, matcher)

            if not sub.cat(submatch, os.path.join(prefix, sub._path),
                           **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2402 2398
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        if scmutil.addremove(repo, matcher, "", opts) != 0:
            raise util.Abort(
                _("failed to mark all new/missing files as added/removed"))

    # delegate the actual commit to the caller-supplied function
    # (e.g. plain commit or qrefresh); returns the new node
    return commitfunc(ui, repo, message, matcher, opts)
2419 2415
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    """Replace changeset 'old' with a new one folding in working-dir changes.

    Commits pending changes as a temporary changeset, then rebuilds 'old'
    on its first parent with those changes folded in. Returns the new
    node (or old.node() when nothing changed). Obsolescence markers are
    created when enabled; otherwise the superseded changesets are
    stripped.
    """
    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete._enabled:
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()

    wlock = lock = newid = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction('amend')
        try:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            currentbookmark = repo._bookmarkcurrent
            try:
                repo._bookmarkcurrent = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._bookmarkcurrent = currentbookmark
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            # |          from working dir to go into amending commit
            # |          (or a workingctx if there were no changes)
            # |
            # old      o - changeset to amend
            # |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                # fix: was 'if old.p2:', which tests the bound method and is
                # always true; call it to check for an actual second parent
                # (same idiom as 'if ctx.p2():' in buildcommittext)
                if old.p2():
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())

                def samefile(f):
                    if f in ctx.manifest():
                        a = ctx.filectx(f)
                        if f in base.manifest():
                            b = base.filectx(f)
                            return (not a.cmp(b)
                                    and a.flags() == b.flags())
                        else:
                            return False
                    else:
                        return f not in base.manifest()
                files = [f for f in files if not samefile(f)]

                def filectxfn(repo, ctx_, path):
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

                user = opts.get('user') or old.user()
                date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This not what we expect from amend.
                return old.node()

            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        marks[bm] = newid
                    marks.write()
            #commit the whole amend process
            createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
            if createmarkers and newid != old.node():
                # mark the new changeset as successor of the rewritten one
                new = repo[newid]
                obs = [(old, (new,))]
                if node:
                    obs.append((ctx, ()))

                obsolete.createmarkers(repo, obs)
            tr.close()
        finally:
            tr.release()
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        if newid is None:
            # the amend did not complete: drop any cached dirstate changes
            repo.dirstate.invalidate()
        lockmod.release(lock, wlock)
    return newid
2614 2610
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's description, invoking the editor only when it is empty."""
    desc = ctx.description()
    if desc:
        return desc
    return commitforceeditor(repo, ctx, subs, editform=editform)
2619 2615
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform=''):
    """Open the user's editor to obtain a commit message for ctx.

    'finishdesc', if given, post-processes the edited text. Raises
    util.Abort when the resulting message is empty.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # look up the most specific [committemplate] entry for this editform,
    # e.g. 'changeset.commit.amend', then 'changeset.commit', then
    # 'changeset'; the while/else falls back to the plain-text skeleton
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = os.getcwd()
    os.chdir(repo.root)
    text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
    # strip the helper comment lines that were shown in the editor
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise util.Abort(_("empty commit message"))

    return text
2649 2645
def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    """Render the commit-editor text for ctx from template 'tmpl'."""
    ui = repo.ui
    tmpl, mapfile = gettemplate(ui, tmpl, None)

    try:
        t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
    except SyntaxError, inst:
        raise util.Abort(inst.args[0])

    # expose every other [committemplate] key (except the main
    # 'changeset' entry) as a template the main one can reference
    for k, v in repo.ui.configitems('committemplate'):
        if k != 'changeset':
            t.t.cache[k] = v

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    # capture the rendered template instead of printing it
    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2669 2665
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the default plain-text commit editor skeleton for ctx."""
    lines = []
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        lines.append(ctx.description())
    lines.append("")
    lines.append("")  # Empty line between message and comments.
    lines.append(_("HG: Enter commit message."
                   " Lines beginning with 'HG:' are removed."))
    lines.append("HG: %s" % extramsg)
    lines.append("HG: --")
    lines.append(_("HG: user: %s") % ctx.user())
    if ctx.p2():
        lines.append(_("HG: branch merge"))
    if ctx.branch():
        lines.append(_("HG: branch '%s'") % ctx.branch())
    if bookmarks.iscurrent(repo):
        lines.append(_("HG: bookmark '%s'") % repo._bookmarkcurrent)
    lines.extend([_("HG: subrepo %s") % s for s in subs])
    lines.extend([_("HG: added %s") % f for f in added])
    lines.extend([_("HG: changed %s") % f for f in modified])
    lines.extend([_("HG: removed %s") % f for f in removed])
    if not (added or modified or removed):
        lines.append(_("HG: no files changed"))
    lines.append("")

    return "\n".join(lines)
2697 2693
def commitstatus(repo, node, branch, bheads=None, opts={}):
    """Print status messages after a commit: new-head and reopened-branch
    notices, plus the committed-changeset line in verbose/debug mode.

    bheads are the branch heads before the commit; opts is read-only
    (only 'amend' and 'close_branch' are consulted).
    """
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads
        and not any(p.node() in bheads and p.branch() == branch
                    for p in parents)):
        repo.ui.status(_('created new head\n'))
        # The message is printed when no parent of the new changeset was
        # a head of the current branch. Classifying each of the two
        # parents (N: null/none, B: other named branch, C: regular
        # non-head changeset, H: head of current branch), and assuming
        # some initial branch heads already exist:
        #
        #   N+N -> printed (additional topological root)
        #   B+N -> printed (additional branch root)
        #   C+N -> printed (additional topological head)
        #   H+N -> not printed (usual case)
        #   B+B -> printed (weird additional branch root)
        #   C+B -> printed (branch merge)
        #   H+B -> not printed (merge with named branch)
        #   C+C -> printed (additional head from merge)
        #   C+H -> not printed (merge with a head)
        #   H+H -> not printed (head merge: head count decreases)
        #
        # Nothing is printed for initial roots.

    if not opts.get('close_branch'):
        for parent in parents:
            if parent.closesbranch() and parent.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % parent)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2743 2739
def revert(ui, repo, ctx, parents, *pats, **opts):
    """Revert files matching pats in the working directory to their
    state in ctx.

    parents is the (parent, p2) pair of the working directory. The
    function classifies every involved file into action sets
    (revert/add/remove/drop/forget/undelete/noop/unknown), decides
    per-file whether a .orig backup is needed, prints the per-file
    messages, and delegates the actual filesystem/dirstate work to
    _performrevert(). 'dry_run' and 'no_backup' are honored from opts.
    """
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        parent = p2
    if node == parent:
        pmf = mf
    else:
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    wlock = repo.wlock()
    try:
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        m = scmutil.match(repo[None], pats, opts)
        if not m.always() or node != parent:
            m.bad = lambda x, y: False
            for abs in repo.walk(m):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                # suppress warnings for paths already collected, subrepos,
                # and directories that prefix an already-known file
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            m = scmutil.match(ctx, pats, opts)
            m.bad = badfn
            for abs in ctx.walk(m):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find status of all file in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # split between files known in target manifest and the others
        smf = set(mf)

        # determine the exact nature of the deleted changesets
        deladded = _deleted - smf
        deleted = _deleted - deladded

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinct between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may needs backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            if pmf is None:
                # only need parent manifest in the merge case,
                # so do not read by default
                pmf = repo[parent].manifest()
            mergeadd = dsmodified - set(pmf)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # distinguish between file to forget and the other
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present at
        # the same path. If a such file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of file>, message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                   }

        # "constant" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set do avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backup = 2  # unconditionally do backup
        check = 1   # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backup = check = discard

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets that results that will change file on disk
            # Modified compared to target, no local change
            (modified, actions['revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions['revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions['revert'], backup),
            # Added since target
            (added, actions['remove'], discard),
            # Added in working directory
            (dsadded, actions['forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions['drop'], discard),
            # Removed since  target, before working copy parent
            (removed, actions['add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions['add'], check),
            # Removed since targe, marked as such in working copy parent
            (dsremoved, actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions['undelete'], check),
            ## the following sets does not result in any file changes
            # File with no modification
            (clean, actions['noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions['unknown'], discard),
            )

        wctx = repo[None]
        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touch on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup and (backup <= dobackup
                                     or wctx[abs].cmp(ctx[abs])):
                        bakname = "%s.orig" % rel
                        ui.note(_('saving current version of %s as %s\n') %
                                (rel, bakname))
                        if not opts.get('dry_run'):
                            util.rename(target, bakname)
                    if ui.verbose or not exact:
                        if not isinstance(msg, basestring):
                            msg = msg(abs)
                        ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])

            _performrevert(repo, parents, ctx, actions)

        # get the list of subrepos that must be reverted
        subrepomatch = scmutil.match(ctx, pats, opts)
        targetsubs = sorted(s for s in ctx.substate if subrepomatch(s))

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                ctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
    finally:
        wlock.release()
3018 3014
3019 3015 def _revertprefetch(repo, ctx, *files):
3020 3016 """Let extension changing the storage layer prefetch content"""
3021 3017 pass
3022 3018
def _performrevert(repo, parents, ctx, actions):
    """function that actually perform all the actions computed for revert

    This is an independent function to let extension to plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.
    """
    parent, p2 = parents
    node = ctx.node()
    def checkout(f):
        # write the content/flags of f as of ctx into the working directory
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    audit_path = pathutil.pathauditor(repo.root)
    # 'forget': stop tracking, leave the file on disk
    for f in actions['forget'][0]:
        repo.dirstate.drop(f)
    # 'remove': delete from disk and mark removed in the dirstate
    for f in actions['remove'][0]:
        audit_path(f)
        util.unlinkpath(repo.wjoin(f))
        repo.dirstate.remove(f)
    # 'drop': file already missing on disk, only update the dirstate
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal
    for f in actions['revert'][0]:
        checkout(f)
        if normal:
            normal(f)

    for f in actions['add'][0]:
        checkout(f)
        repo.dirstate.add(f)

    # undeleted files only get 'normal' state when reverting to the
    # non-merge working directory parent
    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    # restore copy records for every file we re-created
    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3078 3074
def command(table):
    """Build a decorator factory that registers commands into table.

    table must be a dict. The returned callable
    cmd(name, options, synopsis, norepo, optionalrepo, inferrepo)
    produces a decorator which stores the decorated function, its option
    list and (when given) the synopsis under name in table, and records
    the command's aliases on the commands module for the repo-handling
    flags.

    - name: command name, optionally with aliases ("cmd|alias").
    - options: iterable of fancyopts option tuples
      (see ``mercurial.fancyopts.fancyopts()``).
    - synopsis: short one-line usage string shown in help.
    - norepo: the command does not require a local repository.
    - optionalrepo: a local repository is optional.
    - inferrepo: a repository may be inferred from the command line
      arguments (see ``findrepo()``).
    """
    def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
            inferrepo=False):
        def decorator(func):
            entry = (func, list(options))
            if synopsis:
                entry = entry + (synopsis,)
            table[name] = entry

            if norepo or optionalrepo or inferrepo:
                # Avoid import cycle with the commands module.
                import commands
                aliases = ' %s' % ' '.join(parsealiases(name))
                if norepo:
                    commands.norepo += aliases
                if optionalrepo:
                    commands.optionalrepo += aliases
                if inferrepo:
                    commands.inferrepo += aliases

            return func
        return decorator

    return cmd
3134 3130
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
#  (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
#  - (sourceurl, sourcebranch, sourcepeer, incoming)
#  - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()

# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# Each entry is a (state file, clearable, allowcommit, error, hint) tuple,
# consumed by checkunfinished() and clearunfinished() below.
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg update' to abort")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]
3163 3159
def checkunfinished(repo, commit=False):
    '''Abort if a multistep operation (e.g. graft) was left unfinished.

    Intended to be called right before bailifchanged(). With
    commit=True, states whose entry allows committing are skipped.
    '''
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if not (commit and allowcommit) and repo.vfs.exists(statefile):
            raise util.Abort(msg, hint=hint)
3174 3170
def clearunfinished(repo):
    '''Abort on unclearable unfinished state, then remove the clearable
    state files (see unfinishedstates).
    '''
    # first pass: refuse to proceed if any non-clearable state exists
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if not clearable and repo.vfs.exists(statefile):
            raise util.Abort(msg, hint=hint)
    # second pass: wipe every state file that may be cleared
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if clearable and repo.vfs.exists(statefile):
            util.unlink(repo.join(statefile))
@@ -1,2396 +1,2401 b''
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 import cStringIO, email, os, errno, re, posixpath, copy
10 10 import tempfile, zlib, shutil
11 11 # On python2.4 you have to import these by name or they fail to
12 12 # load. This was not a problem on Python 2.7.
13 13 import email.Generator
14 14 import email.Parser
15 15
16 16 from i18n import _
17 17 from node import hex, short
18 import cStringIO
18 19 import base85, mdiff, scmutil, util, diffhelpers, copies, encoding, error
19 20
20 21 gitre = re.compile('diff --git a/(.*) b/(.*)')
21 22 tabsplitter = re.compile(r'(\t+|[^\t]+)')
22 23
23 24 class PatchError(Exception):
24 25 pass
25 26
26 27
27 28 # public functions
28 29
def split(stream):
    '''return an iterator of individual patches from a stream

    Peeks at the first lines of the stream to decide its container
    format (hg export bundle, mbox, MIME email, header-prefixed diff or
    plain patch) and dispatches to the matching splitter; each splitter
    yields file-like chunks (cStringIO) holding one patch apiece.
    '''
    def isheader(line, inheader):
        # true if line looks like an RFC822-style "Key: value" header
        # (or a continuation line while already inside a header)
        if inheader and line[0] in (' ', '\t'):
            # continuation
            return True
        if line[0] in (' ', '-', '+'):
            # diff line - don't check for header pattern in there
            return False
        l = line.split(': ', 1)
        return len(l) == 2 and ' ' not in l[0]

    def chunk(lines):
        # wrap accumulated lines into a file-like object
        return cStringIO.StringIO(''.join(lines))

    def hgsplit(stream, cur):
        # split on "# HG changeset patch" markers (hg export output)
        inheader = True

        for line in stream:
            if not line.strip():
                inheader = False
            if not inheader and line.startswith('# HG changeset patch'):
                yield chunk(cur)
                cur = []
                inheader = True

            cur.append(line)

        if cur:
            yield chunk(cur)

    def mboxsplit(stream, cur):
        # split on mbox "From " separators, recursing on each message body
        for line in stream:
            if line.startswith('From '):
                for c in split(chunk(cur[1:])):
                    yield c
                cur = []

            cur.append(line)

        if cur:
            for c in split(chunk(cur[1:])):
                yield c

    def mimesplit(stream, cur):
        # parse the whole stream as a MIME email and yield text parts
        def msgfp(m):
            # serialize message m back into a file-like object
            fp = cStringIO.StringIO()
            g = email.Generator.Generator(fp, mangle_from_=False)
            g.flatten(m)
            fp.seek(0)
            return fp

        for line in stream:
            cur.append(line)
        c = chunk(cur)

        m = email.Parser.Parser().parse(c)
        if not m.is_multipart():
            yield msgfp(m)
        else:
            ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
            for part in m.walk():
                ct = part.get_content_type()
                if ct not in ok_types:
                    continue
                yield msgfp(part)

    def headersplit(stream, cur):
        # split whenever a new header block begins after non-header text
        inheader = False

        for line in stream:
            if not inheader and isheader(line, inheader):
                yield chunk(cur)
                cur = []
                inheader = True
            if inheader and not isheader(line, inheader):
                inheader = False

            cur.append(line)

        if cur:
            yield chunk(cur)

    def remainder(cur):
        # fallback: the whole input is one plain patch
        yield chunk(cur)

    class fiter(object):
        # adapt objects with readline() but no next() to the iterator
        # protocol (py2 iteration uses next())
        def __init__(self, fp):
            self.fp = fp

        def __iter__(self):
            return self

        def next(self):
            l = self.fp.readline()
            if not l:
                raise StopIteration
            return l

    inheader = False
    cur = []

    mimeheaders = ['content-type']

    if not util.safehasattr(stream, 'next'):
        # http responses, for example, have readline but not next
        stream = fiter(stream)

    for line in stream:
        cur.append(line)
        if line.startswith('# HG changeset patch'):
            return hgsplit(stream, cur)
        elif line.startswith('From '):
            return mboxsplit(stream, cur)
        elif isheader(line, inheader):
            inheader = True
            if line.split(':', 1)[0].lower() in mimeheaders:
                # let email parser handle this
                return mimesplit(stream, cur)
        elif line.startswith('--- ') and inheader:
            # No evil headers seen by diff start, split by hand
            return headersplit(stream, cur)
        # Not enough info, keep reading

    # if we are here, we have a very plain patch
    return remainder(cur)
155 156
def extract(ui, fileobj):
    '''extract patch from data read from fileobj.

    patch can be a normal patch or contained in an email message.

    return tuple (filename, message, user, date, branch, node, p1, p2).
    Any item in the returned tuple can be None. If filename is None,
    fileobj did not contain a patch. Caller must unlink filename when done.'''

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
                        r'retrieving revision [0-9]+(\.[0-9]+)*$|'
                        r'---[ \t].*?^\+\+\+[ \t]|'
                        r'\*\*\*[ \t].*?^---[ \t])', re.MULTILINE|re.DOTALL)

    # the extracted patch body is accumulated into a temp file whose name
    # is returned to (and later unlinked by) the caller
    fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
    tmpfp = os.fdopen(fd, 'w')
    try:
        msg = email.Parser.Parser().parse(fileobj)

        subject = msg['Subject']
        user = msg['From']
        if not subject and not user:
            # Not an email, restore parsed headers if any
            subject = '\n'.join(': '.join(h) for h in msg.items()) + '\n'

        # should try to parse msg['Date']
        date = None
        nodeid = None
        branch = None
        parents = []

        if subject:
            if subject.startswith('[PATCH'):
                # strip a leading "[PATCH n/m]" style tag
                pend = subject.find(']')
                if pend >= 0:
                    subject = subject[pend + 1:].lstrip()
            subject = re.sub(r'\n[ \t]+', ' ', subject)
            ui.debug('Subject: %s\n' % subject)
        if user:
            ui.debug('From: %s\n' % user)
        diffs_seen = 0
        ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
        message = ''
        for part in msg.walk():
            content_type = part.get_content_type()
            ui.debug('Content-Type: %s\n' % content_type)
            if content_type not in ok_types:
                continue
            payload = part.get_payload(decode=True)
            m = diffre.search(payload)
            if m:
                # text before the diff is the commit message, unless we
                # find an "# HG changeset patch" header block
                hgpatch = False
                hgpatchheader = False
                ignoretext = False

                ui.debug('found patch at byte %d\n' % m.start(0))
                diffs_seen += 1
                cfp = cStringIO.StringIO()
                for line in payload[:m.start(0)].splitlines():
                    if line.startswith('# HG changeset patch') and not hgpatch:
                        ui.debug('patch generated by hg export\n')
                        hgpatch = True
                        hgpatchheader = True
                        # drop earlier commit message content
                        cfp.seek(0)
                        cfp.truncate()
                        subject = None
                    elif hgpatchheader:
                        if line.startswith('# User '):
                            user = line[7:]
                            ui.debug('From: %s\n' % user)
                        elif line.startswith("# Date "):
                            date = line[7:]
                        elif line.startswith("# Branch "):
                            branch = line[9:]
                        elif line.startswith("# Node ID "):
                            nodeid = line[10:]
                        elif line.startswith("# Parent "):
                            parents.append(line[9:].lstrip())
                        elif not line.startswith("# "):
                            hgpatchheader = False
                    elif line == '---':
                        # patchbomb-style separator ends the message text
                        ignoretext = True
                    if not hgpatchheader and not ignoretext:
                        cfp.write(line)
                        cfp.write('\n')
                message = cfp.getvalue()
                if tmpfp:
                    tmpfp.write(payload)
                    if not payload.endswith('\n'):
                        tmpfp.write('\n')
            elif not diffs_seen and message and content_type == 'text/plain':
                message += '\n' + payload
    except: # re-raises
        tmpfp.close()
        os.unlink(tmpname)
        raise

    if subject and not message.startswith(subject):
        message = '%s\n%s' % (subject, message)
    tmpfp.close()
    if not diffs_seen:
        # no patch found: remove the temp file and signal with filename=None
        os.unlink(tmpname)
        return None, message, user, date, branch, None, None, None

    if parents:
        p1 = parents.pop(0)
    else:
        p1 = None

    if parents:
        p2 = parents.pop(0)
    else:
        p2 = None

    return tmpname, message, user, date, branch, nodeid, p1, p2
274 275
class patchmeta(object):
    """Patched file metadata

    'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
    or COPY.  'path' is patched file path. 'oldpath' is set to the
    origin file when 'op' is either COPY or RENAME, None otherwise. If
    file mode is changed, 'mode' is a tuple (islink, isexec) where
    'islink' is True if the file is a symlink and 'isexec' is True if
    the file is executable. Otherwise, 'mode' is None.
    """
    def __init__(self, path):
        self.path = path
        self.oldpath = None
        self.mode = None
        self.op = 'MODIFY'
        self.binary = False

    def setmode(self, mode):
        # decompose a git-style octal file mode into the symlink and
        # executable flag bits
        islink = mode & 020000
        isexec = mode & 0100
        self.mode = (islink, isexec)

    def copy(self):
        # return an independent duplicate of this metadata record
        other = patchmeta(self.path)
        other.oldpath = self.oldpath
        other.mode = self.mode
        other.op = self.op
        other.binary = self.binary
        return other

    def _ispatchinga(self, afile):
        # does afile match this patch's "a/" (source) side?
        if afile == '/dev/null':
            return self.op == 'ADD'
        return afile == 'a/' + (self.oldpath or self.path)

    def _ispatchingb(self, bfile):
        # does bfile match this patch's "b/" (destination) side?
        if bfile == '/dev/null':
            return self.op == 'DELETE'
        return bfile == 'b/' + self.path

    def ispatching(self, afile, bfile):
        # true when (afile, bfile) from a hunk header match this record
        return self._ispatchinga(afile) and self._ispatchingb(bfile)

    def __repr__(self):
        return "<patchmeta %s %r>" % (self.op, self.path)
320 321
def readgitpatch(lr):
    """Parse git extended-diff headers from the line reader lr.

    Returns a list of patchmeta objects, one per "diff --git" section,
    carrying the operation, paths and mode information gathered from the
    header lines that precede each diff body.
    """
    current = None
    metadata = []
    for rawline in lr:
        rawline = rawline.rstrip(' \r\n')
        if rawline.startswith('diff --git a/'):
            m = gitre.match(rawline)
            if m:
                if current:
                    metadata.append(current)
                current = patchmeta(m.group(2))
        elif current:
            if rawline.startswith('--- '):
                # reaching the unified diff body ends this header section
                metadata.append(current)
                current = None
                continue
            if rawline.startswith('rename from '):
                current.op = 'RENAME'
                current.oldpath = rawline[12:]
            elif rawline.startswith('rename to '):
                current.path = rawline[10:]
            elif rawline.startswith('copy from '):
                current.op = 'COPY'
                current.oldpath = rawline[10:]
            elif rawline.startswith('copy to '):
                current.path = rawline[8:]
            elif rawline.startswith('deleted file'):
                current.op = 'DELETE'
            elif rawline.startswith('new file mode '):
                current.op = 'ADD'
                current.setmode(int(rawline[-6:], 8))
            elif rawline.startswith('new mode '):
                current.setmode(int(rawline[-6:], 8))
            elif rawline.startswith('GIT binary patch'):
                current.binary = True
    if current:
        metadata.append(current)

    return metadata
364 365
class linereader(object):
    """Wrap a file object with a push-back buffer for lines.

    Lets patch parsers read a line, decide it belongs to the next
    section, and push it back to be re-read later.
    """

    def __init__(self, fp):
        self.fp = fp
        self.buf = []   # pushed-back lines, returned FIFO before fp

    def push(self, line):
        # None is silently ignored so callers may push an EOF result back.
        if line is not None:
            self.buf.append(line)

    def readline(self):
        """Return the next line: pushed-back lines first, then the stream."""
        if not self.buf:
            return self.fp.readline()
        return self.buf.pop(0)

    def __iter__(self):
        line = self.readline()
        while line:
            yield line
            line = self.readline()
388 389
class abstractbackend(object):
    """Interface for the targets patches are applied to.

    Concrete backends write to the filesystem, the working directory,
    or an in-memory store; this base class only holds the ui and
    documents the contract.
    """

    def __init__(self, ui):
        self.ui = ui

    def getfile(self, fname):
        """Return target file data and flags as a (data, (islink,
        isexec)) tuple. Data is None if file is missing/deleted.
        """
        raise NotImplementedError

    def setfile(self, fname, data, mode, copysource):
        """Write data to target file fname and set its mode. mode is a
        (islink, isexec) tuple. If data is None, the file content should
        be left unchanged. If the file is modified after being copied,
        copysource is set to the original file name.
        """
        raise NotImplementedError

    def unlink(self, fname):
        """Unlink target file."""
        raise NotImplementedError

    def writerej(self, fname, failed, total, lines):
        """Write rejected lines for fname. total is the number of hunks
        which failed to apply and total the total number of hunks for this
        files.
        """
        # Intentionally a no-op by default: not every backend keeps rejects.
        pass

    def exists(self, fname):
        """Return True if fname exists in the target."""
        raise NotImplementedError
420 421
class fsbackend(abstractbackend):
    """Backend applying patches directly to files under a base directory."""

    def __init__(self, ui, basedir):
        super(fsbackend, self).__init__(ui)
        self.opener = scmutil.opener(basedir)

    def _join(self, f):
        # Absolute path of f relative to the backend's base directory.
        return os.path.join(self.opener.base, f)

    def getfile(self, fname):
        # Symlinks: the link target is the "data", flagged (islink=True).
        if self.opener.islink(fname):
            return (self.opener.readlink(fname), (True, False))

        isexec = False
        try:
            # 0100 is the owner-execute bit (octal).
            isexec = self.opener.lstat(fname).st_mode & 0100 != 0
        except OSError, e:
            # Missing file just means isexec stays False.
            if e.errno != errno.ENOENT:
                raise
        try:
            return (self.opener.read(fname), (False, isexec))
        except IOError, e:
            if e.errno != errno.ENOENT:
                raise
            # Missing/deleted file: per the abstractbackend contract.
            return None, None

    def setfile(self, fname, data, mode, copysource):
        islink, isexec = mode
        if data is None:
            # Content unchanged, only flags need updating.
            self.opener.setflags(fname, islink, isexec)
            return
        if islink:
            self.opener.symlink(data, fname)
        else:
            self.opener.write(fname, data)
            if isexec:
                self.opener.setflags(fname, False, True)

    def unlink(self, fname):
        # Also prunes now-empty directories; missing files are ignored.
        self.opener.unlinkpath(fname, ignoremissing=True)

    def writerej(self, fname, failed, total, lines):
        # Save hunks that could not be applied next to the target file.
        fname = fname + ".rej"
        self.ui.warn(
            _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
            (failed, total, fname))
        fp = self.opener(fname, 'w')
        fp.writelines(lines)
        fp.close()

    def exists(self, fname):
        # lexists: a dangling symlink still counts as existing.
        return self.opener.lexists(fname)
472 473
class workingbackend(fsbackend):
    """Filesystem backend that also keeps the repo dirstate in sync."""

    def __init__(self, ui, repo, similarity):
        super(workingbackend, self).__init__(ui, repo.root)
        self.repo = repo
        self.similarity = similarity
        self.removed = set()    # files unlinked by the patch
        self.changed = set()    # every file touched by the patch
        self.copied = []        # (source, destination) pairs

    def _checkknown(self, fname):
        """Refuse to patch an on-disk file the dirstate does not track."""
        unknown = self.repo.dirstate[fname] == '?'
        if unknown and self.exists(fname):
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def setfile(self, fname, data, mode, copysource):
        self._checkknown(fname)
        super(workingbackend, self).setfile(fname, data, mode, copysource)
        if copysource is not None:
            self.copied.append((copysource, fname))
        self.changed.add(fname)

    def unlink(self, fname):
        self._checkknown(fname)
        super(workingbackend, self).unlink(fname)
        self.removed.add(fname)
        self.changed.add(fname)

    def close(self):
        """Record copies/removals in the dirstate; return touched files."""
        wctx = self.repo[None]
        touched = set(self.changed)
        for source, dest in self.copied:
            scmutil.dirstatecopy(self.ui, self.repo, wctx, source, dest)
        if self.removed:
            wctx.forget(sorted(self.removed))
            for fname in self.removed:
                if fname not in self.repo.dirstate:
                    # File was deleted and no longer belongs to the
                    # dirstate: it was probably marked added then
                    # deleted, and should not be considered by
                    # marktouched().
                    touched.discard(fname)
        if touched:
            scmutil.marktouched(self.repo, touched, self.similarity)
        return sorted(self.changed)
516 517
class filestore(object):
    """Keep file contents in memory, spilling large data to a temp dir.

    Entries are (data, mode, copied) triples keyed by file name. Once
    the in-memory budget (maxsize, default 4MB) is exhausted, further
    entries are written to files in a temporary directory instead.
    """

    def __init__(self, maxsize=None):
        self.opener = None      # lazily created opener for spilled entries
        self.files = {}         # fname -> (tempname, mode, copied)
        self.created = 0        # counter used to name spilled files
        self.maxsize = maxsize
        if self.maxsize is None:
            self.maxsize = 4*(2**20)
        self.size = 0           # bytes currently held in memory
        self.data = {}          # fname -> (data, mode, copied)

    def setfile(self, fname, data, mode, copied=None):
        # A negative maxsize disables spilling entirely.
        spill = self.maxsize >= 0 and (len(data) + self.size) > self.maxsize
        if not spill:
            self.data[fname] = (data, mode, copied)
            self.size += len(data)
            return
        if self.opener is None:
            root = tempfile.mkdtemp(prefix='hg-patch-')
            self.opener = scmutil.opener(root)
        # Avoid filename issues with these simple names
        tempname = str(self.created)
        self.opener.write(tempname, data)
        self.created += 1
        self.files[fname] = (tempname, mode, copied)

    def getfile(self, fname):
        """Return (data, mode, copied), or (None, None, None) if unknown."""
        try:
            return self.data[fname]
        except KeyError:
            pass
        if self.opener and fname in self.files:
            tempname, mode, copied = self.files[fname]
            return self.opener.read(tempname), mode, copied
        return None, None, None

    def close(self):
        # Only needed when entries were spilled to disk.
        if self.opener:
            shutil.rmtree(self.opener.base)
553 554
class repobackend(abstractbackend):
    """Backend patching files of a repository context into a filestore."""

    def __init__(self, ui, repo, ctx, store):
        super(repobackend, self).__init__(ui)
        self.repo = repo
        self.ctx = ctx          # base changectx patched files are read from
        self.store = store      # filestore receiving patched contents
        self.changed = set()
        self.removed = set()
        self.copied = {}

    def _checkknown(self, fname):
        """Refuse to patch a file unknown to the base context."""
        if fname not in self.ctx:
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def getfile(self, fname):
        try:
            fctx = self.ctx[fname]
        except error.LookupError:
            return None, None
        flags = fctx.flags()
        islink = 'l' in flags
        isexec = 'x' in flags
        return fctx.data(), (islink, isexec)

    def setfile(self, fname, data, mode, copysource):
        if copysource:
            self._checkknown(copysource)
        if data is None:
            # Flags-only change: keep the base context's content.
            data = self.ctx[fname].data()
        self.store.setfile(fname, data, mode, copysource)
        self.changed.add(fname)
        if copysource:
            self.copied[fname] = copysource

    def unlink(self, fname):
        # Removal is only recorded; the store never held the file.
        self._checkknown(fname)
        self.removed.add(fname)

    def exists(self, fname):
        return fname in self.ctx

    def close(self):
        """Return the set of all files changed or removed."""
        return self.changed | self.removed
595 596
# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
unidesc = re.compile('@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
# context-diff range line: '--- start[,end] ----' or '*** start[,end] ****'
contextdesc = re.compile('(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
# recognized end-of-line handling modes for patched files
eolmodes = ['strict', 'crlf', 'lf', 'auto']
600 601
class patchfile(object):
    """Apply the hunks for one file described by a patchmeta.

    Tracks cumulative line offset and skew between hunks, performs
    fuzzy matching when a hunk does not apply cleanly, and collects
    rejected hunks for write_rej().
    """

    def __init__(self, ui, gp, backend, store, eolmode='strict'):
        self.fname = gp.path
        self.eolmode = eolmode
        self.eol = None
        self.backend = backend
        self.ui = ui
        self.lines = []
        self.exists = False
        self.missing = True
        self.mode = gp.mode
        self.copysource = gp.oldpath
        self.create = gp.op in ('ADD', 'COPY', 'RENAME')
        self.remove = gp.op == 'DELETE'
        # Copies/renames read their base content from the store, other
        # operations from the backend.
        if self.copysource is None:
            data, mode = backend.getfile(self.fname)
        else:
            data, mode = store.getfile(self.copysource)[:2]
        if data is not None:
            self.exists = self.copysource is None or backend.exists(self.fname)
            self.missing = False
            if data:
                self.lines = mdiff.splitnewlines(data)
            if self.mode is None:
                self.mode = mode
            if self.lines:
                # Normalize line endings
                if self.lines[0].endswith('\r\n'):
                    self.eol = '\r\n'
                elif self.lines[0].endswith('\n'):
                    self.eol = '\n'
                if eolmode != 'strict':
                    nlines = []
                    for l in self.lines:
                        if l.endswith('\r\n'):
                            l = l[:-2] + '\n'
                        nlines.append(l)
                    self.lines = nlines
        else:
            if self.create:
                self.missing = False
            if self.mode is None:
                self.mode = (False, False)
        if self.missing:
            self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)

        self.hash = {}          # line content -> list of line numbers
        self.dirty = 0
        self.offset = 0         # net lines added/removed by applied hunks
        self.skew = 0           # displacement found by previous fuzzing
        self.rej = []           # hunks that failed to apply
        self.fileprinted = False
        self.printfile(False)
        self.hunks = 0

    def writelines(self, fname, lines, mode):
        """Write lines via the backend, honoring the configured EOL mode."""
        if self.eolmode == 'auto':
            eol = self.eol
        elif self.eolmode == 'crlf':
            eol = '\r\n'
        else:
            eol = '\n'

        if self.eolmode != 'strict' and eol and eol != '\n':
            rawlines = []
            for l in lines:
                if l and l[-1] == '\n':
                    l = l[:-1] + eol
                rawlines.append(l)
            lines = rawlines

        self.backend.setfile(fname, ''.join(lines), mode, self.copysource)

    def printfile(self, warn):
        """Print 'patching file ...' once, as a warning or a note."""
        if self.fileprinted:
            return
        if warn or self.ui.verbose:
            self.fileprinted = True
        s = _("patching file %s\n") % self.fname
        if warn:
            self.ui.warn(s)
        else:
            self.ui.note(s)


    def findlines(self, l, linenum):
        # looks through the hash and finds candidate lines. The
        # result is a list of line numbers sorted based on distance
        # from linenum

        cand = self.hash.get(l, [])
        if len(cand) > 1:
            # resort our list of potentials forward then back.
            cand.sort(key=lambda x: abs(x - linenum))
        return cand

    def write_rej(self):
        # our rejects are a little different from patch(1). This always
        # creates rejects in the same form as the original patch. A file
        # header is inserted so that you can run the reject through patch again
        # without having to type the filename.
        if not self.rej:
            return
        base = os.path.basename(self.fname)
        lines = ["--- %s\n+++ %s\n" % (base, base)]
        for x in self.rej:
            for l in x.hunk:
                lines.append(l)
                if l[-1] != '\n':
                    lines.append("\n\ No newline at end of file\n")
        self.backend.writerej(self.fname, len(self.rej), self.hunks, lines)

    def apply(self, h):
        """Apply one hunk; return 0 on clean apply, fuzz level used, or -1.

        Failed hunks are appended to self.rej.
        """
        if not h.complete():
            raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
                             (h.number, h.desc, len(h.a), h.lena, len(h.b),
                             h.lenb))

        self.hunks += 1

        if self.missing:
            self.rej.append(h)
            return -1

        if self.exists and self.create:
            if self.copysource:
                self.ui.warn(_("cannot create %s: destination already "
                               "exists\n") % self.fname)
            else:
                self.ui.warn(_("file %s already exists\n") % self.fname)
            self.rej.append(h)
            return -1

        if isinstance(h, binhunk):
            # Binary hunks replace the whole content; no fuzzing.
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                l = h.new(self.lines)
                self.lines[:] = l
                self.offset += len(l)
                self.dirty = True
            return 0

        horig = h
        if (self.eolmode in ('crlf', 'lf')
            or self.eolmode == 'auto' and self.eol):
            # If new eols are going to be normalized, then normalize
            # hunk data before patching. Otherwise, preserve input
            # line-endings.
            h = h.getnormalized()

        # fast case first, no offsets, no fuzz
        old, oldstart, new, newstart = h.fuzzit(0, False)
        oldstart += self.offset
        orig_start = oldstart
        # if there's skew we want to emit the "(offset %d lines)" even
        # when the hunk cleanly applies at start + skew, so skip the
        # fast case code
        if (self.skew == 0 and
            diffhelpers.testhunk(old, self.lines, oldstart) == 0):
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                self.lines[oldstart:oldstart + len(old)] = new
                self.offset += len(new) - len(old)
                self.dirty = True
            return 0

        # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
        self.hash = {}
        for x, s in enumerate(self.lines):
            self.hash.setdefault(s, []).append(x)

        for fuzzlen in xrange(3):
            for toponly in [True, False]:
                old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
                oldstart = oldstart + self.offset + self.skew
                oldstart = min(oldstart, len(self.lines))
                if old:
                    cand = self.findlines(old[0][1:], oldstart)
                else:
                    # Only adding lines with no or fuzzed context, just
                    # take the skew in account
                    cand = [oldstart]

                for l in cand:
                    if not old or diffhelpers.testhunk(old, self.lines, l) == 0:
                        self.lines[l : l + len(old)] = new
                        self.offset += len(new) - len(old)
                        self.skew = l - orig_start
                        self.dirty = True
                        offset = l - orig_start - fuzzlen
                        if fuzzlen:
                            msg = _("Hunk #%d succeeded at %d "
                                    "with fuzz %d "
                                    "(offset %d lines).\n")
                            self.printfile(True)
                            self.ui.warn(msg %
                                (h.number, l + 1, fuzzlen, offset))
                        else:
                            msg = _("Hunk #%d succeeded at %d "
                                    "(offset %d lines).\n")
                            self.ui.note(msg % (h.number, l + 1, offset))
                        return fuzzlen
        self.printfile(True)
        self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
        self.rej.append(horig)
        return -1

    def close(self):
        """Flush modified lines and rejects; return the reject count."""
        if self.dirty:
            self.writelines(self.fname, self.lines, self.mode)
        self.write_rej()
        return len(self.rej)
815 816
class header(object):
    """patch header

    Holds the raw header lines of one file's diff and the list of
    recordhunk objects parsed after it.
    """
    diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
    diff_re = re.compile('diff -r .* (.*)$')
    allhunks_re = re.compile('(?:index|deleted file) ')
    pretty_re = re.compile('(?:new file|deleted file) ')
    special_re = re.compile('(?:index|new|deleted|copy|rename) ')

    def __init__(self, header):
        self.header = header
        self.hunks = []

    def binary(self):
        # An 'index ' line marks a git binary diff here.
        return util.any(h.startswith('index ') for h in self.header)

    def pretty(self, fp):
        """Write a condensed, human-oriented version of the header to fp."""
        for h in self.header:
            if h.startswith('index '):
                fp.write(_('this modifies a binary file (all or nothing)\n'))
                break
            if self.pretty_re.match(h):
                fp.write(h)
                if self.binary():
                    fp.write(_('this is a binary file\n'))
                    break
                # NOTE(review): a non-binary new/deleted-file line falls
                # through and is written again below -- confirm intended.
            if h.startswith('---'):
                fp.write(_('%d hunks, %d lines changed\n') %
                         (len(self.hunks),
                          sum([max(h.added, h.removed) for h in self.hunks])))
                break
            fp.write(h)

    def write(self, fp):
        fp.write(''.join(self.header))

    def allhunks(self):
        # True when hunks cannot be filtered individually (binary/delete).
        return util.any(self.allhunks_re.match(h) for h in self.header)

    def files(self):
        """Return the file name(s) this diff touches (1 or 2 entries)."""
        match = self.diffgit_re.match(self.header[0])
        if match:
            fromfile, tofile = match.groups()
            if fromfile == tofile:
                return [fromfile]
            return [fromfile, tofile]
        else:
            return self.diff_re.match(self.header[0]).groups()

    def filename(self):
        return self.files()[-1]

    def __repr__(self):
        return '<header %s>' % (' '.join(map(repr, self.files())))

    def special(self):
        # True for headers carrying metadata changes (add/delete/copy/...).
        return util.any(self.special_re.match(h) for h in self.header)
873 874
class recordhunk(object):
    """patch hunk

    XXX shouldn't we merge this with the other hunk class?
    """
    maxcontext = 3

    def __init__(self, header, fromline, toline, proc, before, hunk, after):
        def trimcontext(number, lines):
            # Trimming is currently disabled (note the 'if False'):
            # context lines and start numbers pass through unchanged.
            delta = len(lines) - self.maxcontext
            if False and delta > 0:
                return number + delta, lines[:self.maxcontext]
            return number, lines

        self.header = header
        self.fromline, self.before = trimcontext(fromline, before)
        self.toline, self.after = trimcontext(toline, after)
        self.proc = proc
        self.hunk = hunk
        self.added, self.removed = self.countchanges(self.hunk)

    def countchanges(self, hunk):
        """hunk -> (n+,n-)"""
        plus = sum(1 for line in hunk if line[0] == '+')
        minus = sum(1 for line in hunk if line[0] == '-')
        return plus, minus

    def write(self, fp):
        """Emit this hunk, including its @@ range line, to *fp*."""
        context = len(self.before) + len(self.after)
        # A trailing no-eol marker is not counted in the line ranges.
        if self.after and self.after[-1] == '\\ No newline at end of file\n':
            context -= 1
        oldlen = context + self.removed
        newlen = context + self.added
        procsuffix = (' ' + self.proc) if self.proc else self.proc
        fp.write('@@ -%d,%d +%d,%d @@%s\n' %
                 (self.fromline, oldlen, self.toline, newlen, procsuffix))
        fp.write(''.join(self.before + self.hunk + self.after))

    pretty = write

    def filename(self):
        return self.header.filename()

    def __repr__(self):
        return '<hunk %r@%d>' % (self.filename(), self.fromline)
919 920
def filterpatch(ui, headers):
    """Interactively filter patch chunks into applied-only chunks

    Walks every header and hunk, prompting the user for each, and
    returns the flat list of headers/hunks that were accepted.
    """

    def prompt(skipfile, skipall, query, chunk):
        """prompt query, and process base inputs

        - y/n for the rest of file
        - y/n for the rest
        - ? (help)
        - q (quit)

        Return True/False and possibly updated skipfile and skipall.
        """
        newpatches = None
        # Sticky answers short-circuit the prompt entirely.
        if skipall is not None:
            return skipall, skipfile, skipall, newpatches
        if skipfile is not None:
            return skipfile, skipfile, skipall, newpatches
        while True:
            resps = _('[Ynesfdaq?]'
                      '$$ &Yes, record this change'
                      '$$ &No, skip this change'
                      '$$ &Edit this change manually'
                      '$$ &Skip remaining changes to this file'
                      '$$ Record remaining changes to this &file'
                      '$$ &Done, skip remaining changes and files'
                      '$$ Record &all changes to all remaining files'
                      '$$ &Quit, recording no changes'
                      '$$ &? (display help)')
            r = ui.promptchoice("%s %s" % (query, resps))
            ui.write("\n")
            if r == 8: # ?
                for c, t in ui.extractchoices(resps)[1]:
                    ui.write('%s - %s\n' % (c, t.lower()))
                continue
            elif r == 0: # yes
                ret = True
            elif r == 1: # no
                ret = False
            elif r == 2: # Edit patch
                if chunk is None:
                    ui.write(_('cannot edit patch for whole file'))
                    ui.write("\n")
                    continue
                if chunk.header.binary():
                    ui.write(_('cannot edit patch for binary file'))
                    ui.write("\n")
                    continue
                # Patch comment based on the Git one (based on comment at end of
                # http://mercurial.selenic.com/wiki/RecordExtension)
                phelp = '---' + _("""
To remove '-' lines, make them ' ' lines (context).
To remove '+' lines, delete them.
Lines starting with # will be removed from the patch.

If the patch applies cleanly, the edited hunk will immediately be
added to the record list. If it does not apply cleanly, a rejects
file will be generated: you can use that when you try again. If
all lines of the hunk are removed, then the edit is aborted and
the hunk is left unchanged.
""")
                (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-",
                                                      suffix=".diff",
                                                      text=True)
                ncpatchfp = None
                try:
                    # Write the initial patch
                    f = os.fdopen(patchfd, "w")
                    chunk.header.write(f)
                    chunk.write(f)
                    f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
                    f.close()
                    # Start the editor and wait for it to complete
                    editor = ui.geteditor()
                    ui.system("%s \"%s\"" % (editor, patchfn),
                              environ={'HGUSER': ui.username()},
                              onerr=util.Abort, errprefix=_("edit failed"))
                    # Remove comment lines
                    patchfp = open(patchfn)
                    ncpatchfp = cStringIO.StringIO()
                    for line in patchfp:
                        if not line.startswith('#'):
                            ncpatchfp.write(line)
                    patchfp.close()
                    ncpatchfp.seek(0)
                    newpatches = parsepatch(ncpatchfp)
                finally:
                    os.unlink(patchfn)
                    del ncpatchfp
                # Signal that the chunk shouldn't be applied as-is, but
                # provide the new patch to be used instead.
                ret = False
            elif r == 3: # Skip
                ret = skipfile = False
            elif r == 4: # file (Record remaining)
                ret = skipfile = True
            elif r == 5: # done, skip remaining
                ret = skipall = False
            elif r == 6: # all
                ret = skipall = True
            elif r == 7: # quit
                raise util.Abort(_('user quit'))
            return ret, skipfile, skipall, newpatches

    seen = set()
    applied = {}        # 'filename' -> [] of chunks
    skipfile, skipall = None, None
    pos, total = 1, sum(len(h.hunks) for h in headers)
    for h in headers:
        pos += len(h.hunks)
        skipfile = None
        # Offset applied to later hunks when earlier ones were skipped.
        fixoffset = 0
        hdr = ''.join(h.header)
        if hdr in seen:
            continue
        seen.add(hdr)
        if skipall is None:
            h.pretty(ui)
            msg = (_('examine changes to %s?') %
                   _(' and ').join("'%s'" % f for f in h.files()))
            r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
            if not r:
                continue
        applied[h.filename()] = [h]
        if h.allhunks():
            # Binary/metadata-only changes are all-or-nothing.
            applied[h.filename()] += h.hunks
            continue
        for i, chunk in enumerate(h.hunks):
            if skipfile is None and skipall is None:
                chunk.pretty(ui)
            if total == 1:
                msg = _("record this change to '%s'?") % chunk.filename()
            else:
                idx = pos - len(h.hunks) + i
                msg = _("record change %d/%d to '%s'?") % (idx, total,
                                                           chunk.filename())
            r, skipfile, skipall, newpatches = prompt(skipfile,
                                                      skipall, msg, chunk)
            if r:
                if fixoffset:
                    # Copy before mutating: the chunk may be shared.
                    chunk = copy.copy(chunk)
                    chunk.toline += fixoffset
                applied[chunk.filename()].append(chunk)
            elif newpatches is not None:
                # User edited the hunk: record the edited version(s).
                for newpatch in newpatches:
                    for newhunk in newpatch.hunks:
                        if fixoffset:
                            newhunk.toline += fixoffset
                        applied[newhunk.filename()].append(newhunk)
            else:
                fixoffset += chunk.removed - chunk.added
    # Keep only files with at least one accepted hunk or a special header.
    return sum([h for h in applied.itervalues()
               if h[0].special() or len(h) > 1], [])
class hunk(object):
    """One textual hunk parsed from a unified or context diff.

    self.a / self.b hold the old and new line lists; starta/lena and
    startb/lenb mirror the ranges from the hunk header.
    """

    def __init__(self, desc, num, lr, context):
        self.number = num
        self.desc = desc
        self.hunk = [desc]
        self.a = []
        self.b = []
        self.starta = self.lena = None
        self.startb = self.lenb = None
        # lr is None when building a dummy copy (see getnormalized).
        if lr is not None:
            if context:
                self.read_context_hunk(lr)
            else:
                self.read_unified_hunk(lr)

    def getnormalized(self):
        """Return a copy with line endings normalized to LF."""

        def normalize(lines):
            nlines = []
            for line in lines:
                if line.endswith('\r\n'):
                    line = line[:-2] + '\n'
                nlines.append(line)
            return nlines

        # Dummy object, it is rebuilt manually
        nh = hunk(self.desc, self.number, None, None)
        nh.number = self.number
        nh.desc = self.desc
        nh.hunk = self.hunk
        nh.a = normalize(self.a)
        nh.b = normalize(self.b)
        nh.starta = self.starta
        nh.startb = self.startb
        nh.lena = self.lena
        nh.lenb = self.lenb
        return nh

    def read_unified_hunk(self, lr):
        """Parse a unified-diff hunk body following an @@ header."""
        m = unidesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, self.lena, self.startb, self.lenb = m.groups()
        # Omitted lengths default to 1 ('@@ -start +start @@' form).
        if self.lena is None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb is None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a,
                             self.b)
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length. Lets try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1
        self._fixnewline(lr)

    def read_context_hunk(self, lr):
        """Parse a context-diff hunk (the '***'/'---' range form)."""
        self.desc = lr.readline()
        m = contextdesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, aend = m.groups()
        self.starta = int(self.starta)
        if aend is None:
            aend = self.starta
        self.lena = int(aend) - self.starta
        if self.starta:
            self.lena += 1
        # Read the old-side lines.
        for x in xrange(self.lena):
            l = lr.readline()
            if l.startswith('---'):
                # lines addition, old block is empty
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('- ') or l.startswith('! '):
                u = '-' + s
            elif l.startswith('  '):
                u = ' ' + s
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.a.append(u)
            self.hunk.append(u)

        l = lr.readline()
        if l.startswith('\ '):
            # '\ No newline at end of file': strip the trailing newline.
            s = self.a[-1][:-1]
            self.a[-1] = s
            self.hunk[-1] = s
            l = lr.readline()
        m = contextdesc.match(l)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.startb, bend = m.groups()
        self.startb = int(self.startb)
        if bend is None:
            bend = self.startb
        self.lenb = int(bend) - self.startb
        if self.startb:
            self.lenb += 1
        hunki = 1
        # Read the new-side lines, merging them into self.hunk in order.
        for x in xrange(self.lenb):
            l = lr.readline()
            if l.startswith('\ '):
                # XXX: the only way to hit this is with an invalid line range.
                # The no-eol marker is not counted in the line range, but I
                # guess there are diff(1) out there which behave differently.
                s = self.b[-1][:-1]
                self.b[-1] = s
                self.hunk[hunki - 1] = s
                continue
            if not l:
                # line deletions, new block is empty and we hit EOF
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('+ ') or l.startswith('! '):
                u = '+' + s
            elif l.startswith('  '):
                u = ' ' + s
            elif len(self.b) == 0:
                # line deletions, new block is empty
                lr.push(l)
                break
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.b.append(s)
            while True:
                if hunki >= len(self.hunk):
                    h = ""
                else:
                    h = self.hunk[hunki]
                hunki += 1
                if h == u:
                    break
                elif h.startswith('-'):
                    continue
                else:
                    self.hunk.insert(hunki - 1, u)
                    break

        if not self.a:
            # this happens when lines were only added to the hunk
            for x in self.hunk:
                if x.startswith('-') or x.startswith(' '):
                    self.a.append(x)
        if not self.b:
            # this happens when lines were only deleted from the hunk
            for x in self.hunk:
                if x.startswith('+') or x.startswith(' '):
                    self.b.append(x[1:])
        # @@ -start,len +start,len @@
        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
                                               self.startb, self.lenb)
        self.hunk[0] = self.desc
        self._fixnewline(lr)

    def _fixnewline(self, lr):
        # Handle a trailing '\ No newline at end of file' marker.
        l = lr.readline()
        if l.startswith('\ '):
            diffhelpers.fix_newline(self.hunk, self.a, self.b)
        else:
            lr.push(l)

    def complete(self):
        """True when all lines announced by the header were read."""
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def _fuzzit(self, old, new, fuzz, toponly):
        # this removes context lines from the top and bottom of list 'l'. It
        # checks the hunk to make sure only context lines are removed, and then
        # returns a new shortened list of lines.
        fuzz = min(fuzz, len(old))
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            for x in xrange(hlen - 1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x + 1][0] == ' ':
                    top += 1
                else:
                    break
            if not toponly:
                for x in xrange(hlen - 1):
                    if self.hunk[hlen - bot - 1][0] == ' ':
                        bot += 1
                    else:
                        break

            bot = min(fuzz, bot)
            top = min(fuzz, top)
            return old[top:len(old) - bot], new[top:len(new) - bot], top
        return old, new, 0

    def fuzzit(self, fuzz, toponly):
        """Return (old, oldstart, new, newstart) with *fuzz* context trimmed."""
        old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
        oldstart = self.starta + top
        newstart = self.startb + top
        # zero length hunk ranges already have their start decremented
        if self.lena and oldstart > 0:
            oldstart -= 1
        if self.lenb and newstart > 0:
            newstart -= 1
        return old, oldstart, new, newstart
1288 1289
class binhunk(object):
    'A binary patch file.'
    def __init__(self, lr, fname):
        self.text = None        # decoded payload; None until _read succeeds
        self.delta = False      # True for 'delta' (vs 'literal') payloads
        self.hunk = ['GIT binary patch\n']
        self._fname = fname
        self._read(lr)

    def complete(self):
        return self.text is not None

    def new(self, lines):
        """Return the new file content given the old content *lines*."""
        if self.delta:
            return [applybindelta(self.text, ''.join(lines))]
        return [self.text]

    def _read(self, lr):
        """Decode the base85/zlib payload of a git binary patch."""
        def getline(lr, hunk):
            # Read one line, keeping the raw form in *hunk* for rejects.
            l = lr.readline()
            hunk.append(l)
            return l.rstrip('\r\n')

        size = 0
        # Find the 'literal <size>' or 'delta <size>' introducer.
        while True:
            line = getline(lr, self.hunk)
            if not line:
                raise PatchError(_('could not extract "%s" binary data')
                                 % self._fname)
            if line.startswith('literal '):
                size = int(line[8:].rstrip())
                break
            if line.startswith('delta '):
                size = int(line[6:].rstrip())
                self.delta = True
                break
        dec = []
        line = getline(lr, self.hunk)
        while len(line) > 1:
            # First char encodes the decoded byte count of this line:
            # 'A'-'Z' -> 1..26, 'a'-'z' -> 27..52.
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            try:
                dec.append(base85.b85decode(line[1:])[:l])
            except ValueError, e:
                raise PatchError(_('could not decode "%s" binary patch: %s')
                                 % (self._fname, str(e)))
            line = getline(lr, self.hunk)
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            raise PatchError(_('"%s" length is %d bytes, should be %d')
                             % (self._fname, len(text), size))
        self.text = text
1344 1345
def parsefilename(str):
    """extract the file name from a '--- ' or '+++ ' patch line

    The name runs from column 4 up to (but not including) the first tab,
    or the first space when there is no tab, mirroring what GNU patch
    accepts.
    """
    # --- filename \t|space stuff
    name = str[4:].rstrip('\r\n')
    for sep in ('\t', ' '):
        idx = name.find(sep)
        if idx >= 0:
            return name[:idx]
    return name
1354 1355
def parsepatch(originalchunks):
    """patch -> [] of headers -> [] of hunks """
    class parser(object):
        """patch parsing state machine"""
        def __init__(self):
            self.fromline = 0   # current line number in the old file
            self.toline = 0     # current line number in the new file
            self.proc = ''      # function/context text from the @@ line
            self.header = None  # header of the file currently parsed
            self.context = []   # trailing context of the previous hunk
            self.before = []    # leading context of the pending hunk
            self.hunk = []      # +/- lines of the pending hunk
            self.headers = []   # all file headers seen so far

        def addrange(self, limits):
            fromstart, fromend, tostart, toend, proc = limits
            self.fromline = int(fromstart)
            self.toline = int(tostart)
            self.proc = proc

        def addcontext(self, context):
            # flush the pending hunk (if any), using 'context' as its
            # trailing context; the same lines are kept as potential
            # leading context for the next hunk
            if self.hunk:
                h = recordhunk(self.header, self.fromline, self.toline,
                               self.proc, self.before, self.hunk, context)
                self.header.hunks.append(h)
                self.fromline += len(self.before) + h.removed
                self.toline += len(self.before) + h.added
                self.before = []
                self.hunk = []
                self.proc = ''
            self.context = context

        def addhunk(self, hunk):
            if self.context:
                self.before = self.context
                self.context = []
            self.hunk = hunk

        def newfile(self, hdr):
            # finish any pending hunk before switching files
            self.addcontext([])
            h = header(hdr)
            self.headers.append(h)
            self.header = h

        def addother(self, line):
            pass # 'other' lines are ignored

        def finished(self):
            self.addcontext([])
            return self.headers

        # state -> {event -> handler}; any transition missing here is a
        # malformed patch
        transitions = {
            'file': {'context': addcontext,
                     'file': newfile,
                     'hunk': addhunk,
                     'range': addrange},
            'context': {'file': newfile,
                        'hunk': addhunk,
                        'range': addrange,
                        'other': addother},
            'hunk': {'context': addcontext,
                     'file': newfile,
                     'range': addrange},
            'range': {'context': addcontext,
                      'hunk': addhunk},
            'other': {'other': addother},
        }

    p = parser()
    # scanpatch() wants a file-like object: join the chunks into one
    fp = cStringIO.StringIO()
    fp.write(''.join(originalchunks))
    fp.seek(0)

    state = 'context'
    for newstate, data in scanpatch(fp):
        try:
            p.transitions[state][newstate](p, data)
        except KeyError:
            raise PatchError('unhandled transition: %s -> %s' %
                             (state, newstate))
        state = newstate
    del fp
    return p.finished()
1434 1439
def pathtransform(path, strip, prefix):
    '''turn a path from a patch into a path suitable for the repository

    prefix, if not empty, is expected to be normalized with a / at the end.

    Returns (stripped components, path in repository).

    >>> pathtransform('a/b/c', 0, '')
    ('', 'a/b/c')
    >>> pathtransform(' a/b/c ', 0, '')
    ('', ' a/b/c')
    >>> pathtransform(' a/b/c ', 2, '')
    ('a/b/', 'c')
    >>> pathtransform(' a//b/c ', 2, 'd/e/')
    ('a//b/', 'd/e/c')
    >>> pathtransform('a/b/c', 3, '')
    Traceback (most recent call last):
    PatchError: unable to strip away 1 of 3 dirs from a/b/c
    '''
    if strip == 0:
        return '', path.rstrip()
    pathlen = len(path)
    pos = 0
    for done in range(strip):
        pos = path.find('/', pos)
        if pos == -1:
            raise PatchError(_("unable to strip away %d of %d dirs from %s")
                             % (strip - done, strip, path))
        pos += 1
        # consume '//' in the path
        while pos < pathlen - 1 and path[pos] == '/':
            pos += 1
    return path[:pos].lstrip(), prefix + path[pos:].rstrip()
1470 1475
def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix):
    # Build a patchmeta (target path plus ADD/DELETE operation) for a
    # plain, non-git hunk, guessing which of afile/bfile should be
    # patched based on which side exists in the backend.
    nulla = afile_orig == "/dev/null"
    nullb = bfile_orig == "/dev/null"
    # a 0,0 range on the null side marks file creation/removal
    create = nulla and hunk.starta == 0 and hunk.lena == 0
    remove = nullb and hunk.startb == 0 and hunk.lenb == 0
    abase, afile = pathtransform(afile_orig, strip, prefix)
    gooda = not nulla and backend.exists(afile)
    bbase, bfile = pathtransform(bfile_orig, strip, prefix)
    if afile == bfile:
        goodb = gooda
    else:
        goodb = not nullb and backend.exists(bfile)
    missing = not goodb and not gooda and not create

    # some diff programs apparently produce patches where the afile is
    # not /dev/null, but afile starts with bfile
    abasedir = afile[:afile.rfind('/') + 1]
    bbasedir = bfile[:bfile.rfind('/') + 1]
    if (missing and abasedir == bbasedir and afile.startswith(bfile)
        and hunk.starta == 0 and hunk.lena == 0):
        create = True
        missing = False

    # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
    # diff is between a file and its backup. In this case, the original
    # file should be patched (see original mpatch code).
    isbackup = (abase == bbase and bfile.startswith(afile))
    fname = None
    if not missing:
        if gooda and goodb:
            if isbackup:
                fname = afile
            else:
                fname = bfile
        elif gooda:
            fname = afile

    # fall back on the patch names themselves when neither side exists
    if not fname:
        if not nullb:
            if isbackup:
                fname = afile
            else:
                fname = bfile
        elif not nulla:
            fname = afile
        else:
            raise PatchError(_("undefined source and destination files"))

    gp = patchmeta(fname)
    if create:
        gp.op = 'ADD'
    elif remove:
        gp.op = 'DELETE'
    return gp
1525 1530
def scanpatch(fp):
    """like patch.iterhunks, but yield different events

    - ('file', [header_lines + fromfile + tofile])
    - ('context', [context_lines])
    - ('hunk', [hunk_lines])
    - ('range', (-start,len, +start,len, proc))
    """
    lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
    lr = linereader(fp)

    def scanwhile(first, p):
        """scan lr while predicate holds"""
        lines = [first]
        while True:
            line = lr.readline()
            if not line:
                break
            if p(line):
                lines.append(line)
            else:
                # not part of this run: push back for the main loop
                lr.push(line)
                break
        return lines

    while True:
        line = lr.readline()
        if not line:
            break
        if line.startswith('diff --git a/') or line.startswith('diff -r '):
            # collect all header lines up to the next '---' or 'diff'
            def notheader(line):
                s = line.split(None, 1)
                return not s or s[0] not in ('---', 'diff')
            header = scanwhile(line, notheader)
            fromfile = lr.readline()
            if fromfile.startswith('---'):
                tofile = lr.readline()
                header += [fromfile, tofile]
            else:
                lr.push(fromfile)
            yield 'file', header
        elif line[0] == ' ':
            # context lines; '\\' covers "\ No newline at end of file"
            yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
        elif line[0] in '-+':
            yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
        else:
            m = lines_re.match(line)
            if m:
                yield 'range', m.groups()
            else:
                yield 'other', line
1577 1582
def scangitpatch(lr, firstline):
    """
    Git patches can emit:
    - rename a to b
    - change b
    - copy a to c
    - change c

    We cannot apply this sequence as-is, the renamed 'a' could not be
    found for it would have been renamed already. And we cannot copy
    from 'b' instead because 'b' would have been changed already. So
    we scan the git patch for copy and rename commands so we can
    perform the copies ahead of time.
    """
    pos = 0
    try:
        pos = lr.fp.tell()
        fp = lr.fp
    except IOError:
        # the underlying file is not seekable: buffer everything in
        # memory so we can rewind after the prescan
        fp = cStringIO.StringIO(lr.fp.read())
    gitlr = linereader(fp)
    gitlr.push(firstline)
    gitpatches = readgitpatch(gitlr)
    # rewind so the caller can reparse the patch from the start
    fp.seek(pos)
    return gitpatches
1603 1608
def iterhunks(fp):
    """Read a patch and yield the following events:
    - ("file", afile, bfile, firsthunk): select a new target file.
    - ("hunk", hunk): a new hunk is ready to be applied, follows a
    "file" event.
    - ("git", gitchanges): current diff is in git format, gitchanges
    maps filenames to gitpatch records. Unique event.
    """
    afile = ""
    bfile = ""
    state = None
    hunknum = 0
    emitfile = newfile = False
    gitpatches = None

    # our states
    BFILE = 1
    # None: diff flavor unknown yet, True: context diff, False: unified
    context = None
    lr = linereader(fp)

    while True:
        x = lr.readline()
        if not x:
            break
        if state == BFILE and (
            (not context and x[0] == '@')
            or (context is not False and x.startswith('***************'))
            or x.startswith('GIT binary patch')):
            # start of a hunk for the currently selected file
            gp = None
            if (gitpatches and
                gitpatches[-1].ispatching(afile, bfile)):
                gp = gitpatches.pop()
            if x.startswith('GIT binary patch'):
                h = binhunk(lr, gp.path)
            else:
                if context is None and x.startswith('***************'):
                    context = True
                h = hunk(x, hunknum + 1, lr, context)
            hunknum += 1
            if emitfile:
                emitfile = False
                yield 'file', (afile, bfile, h, gp and gp.copy() or None)
            yield 'hunk', h
        elif x.startswith('diff --git a/'):
            m = gitre.match(x.rstrip(' \r\n'))
            if not m:
                continue
            if gitpatches is None:
                # scan whole input for git metadata
                gitpatches = scangitpatch(lr, x)
                yield 'git', [g.copy() for g in gitpatches
                              if g.op in ('COPY', 'RENAME')]
                gitpatches.reverse()
            afile = 'a/' + m.group(1)
            bfile = 'b/' + m.group(2)
            # emit metadata-only entries for git patches we skipped over
            while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
                gp = gitpatches.pop()
                yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
            if not gitpatches:
                raise PatchError(_('failed to synchronize metadata for "%s"')
                                 % afile[2:])
            gp = gitpatches[-1]
            newfile = True
        elif x.startswith('---'):
            # check for a unified diff
            l2 = lr.readline()
            if not l2.startswith('+++'):
                lr.push(l2)
                continue
            newfile = True
            context = False
            afile = parsefilename(x)
            bfile = parsefilename(l2)
        elif x.startswith('***'):
            # check for a context diff
            l2 = lr.readline()
            if not l2.startswith('---'):
                lr.push(l2)
                continue
            l3 = lr.readline()
            lr.push(l3)
            if not l3.startswith("***************"):
                lr.push(l2)
                continue
            newfile = True
            context = True
            afile = parsefilename(x)
            bfile = parsefilename(l2)

        if newfile:
            newfile = False
            emitfile = True
            state = BFILE
            hunknum = 0

    # emit any remaining git patch entries that never produced a hunk
    while gitpatches:
        gp = gitpatches.pop()
        yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
1702 1707
def applybindelta(binchunk, data):
    """Apply a binary delta hunk
    The algorithm used is the algorithm from git's patch-delta.c
    """
    def skipheader(chunk):
        # a delta header is a base-128 varint: skip bytes until one has
        # its high bit clear
        idx = 0
        for ch in chunk:
            idx += 1
            if not (ord(ch) & 0x80):
                return idx
        return idx
    # discard the source-size and destination-size headers
    binchunk = binchunk[skipheader(binchunk):]
    binchunk = binchunk[skipheader(binchunk):]
    output = ""
    pos = 0
    end = len(binchunk)
    while pos < end:
        opcode = ord(binchunk[pos])
        pos += 1
        if opcode & 0x80:
            # copy instruction: low opcode bits flag which offset/size
            # bytes follow, little-endian
            offset = 0
            size = 0
            for shiftidx, flag in enumerate((0x01, 0x02, 0x04, 0x08)):
                if opcode & flag:
                    offset |= ord(binchunk[pos]) << (shiftidx * 8)
                    pos += 1
            for shiftidx, flag in enumerate((0x10, 0x20, 0x40)):
                if opcode & flag:
                    size |= ord(binchunk[pos]) << (shiftidx * 8)
                    pos += 1
            if size == 0:
                size = 0x10000
            output += data[offset:offset + size]
        elif opcode != 0:
            # literal insertion of the next 'opcode' bytes
            output += binchunk[pos:pos + opcode]
            pos += opcode
        else:
            raise PatchError(_('unexpected delta opcode 0'))
    return output
1758 1763
def applydiff(ui, fp, backend, store, strip=1, prefix='', eolmode='strict'):
    """Read a patch from fp and try to apply it through 'backend'.

    Returns 0 for a clean patch, -1 if any rejects were found and 1 if
    there was any fuzz.

    If 'eolmode' is 'strict', the patch content and patched file are
    read in binary mode. Otherwise, line endings are ignored when
    patching then normalized according to 'eolmode'.
    """
    # delegate to the generic driver with the default patchfile factory
    return _applydiff(ui, fp, patchfile, backend, store, strip, prefix,
                      eolmode)
1771 1776
def _applydiff(ui, fp, patcher, backend, store, strip=1, prefix='',
               eolmode='strict'):
    # Drive iterhunks() over fp, applying each file's hunks through a
    # 'patcher'-created object. Returns -1 when rejects occurred, 1 when
    # any hunk applied with fuzz, 0 otherwise.
    if prefix:
        # clean up double slashes, lack of trailing slashes, etc
        prefix = util.normpath(prefix) + '/'
    def pstrip(p):
        return pathtransform(p, strip - 1, prefix)[1]

    rejects = 0
    err = 0
    current_file = None

    for state, values in iterhunks(fp):
        if state == 'hunk':
            if not current_file:
                continue
            ret = current_file.apply(values)
            if ret > 0:
                err = 1
        elif state == 'file':
            # close the previous file before selecting a new target
            if current_file:
                rejects += current_file.close()
            current_file = None
            afile, bfile, first_hunk, gp = values
            if gp:
                gp.path = pstrip(gp.path)
                if gp.oldpath:
                    gp.oldpath = pstrip(gp.oldpath)
            else:
                gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
                                   prefix)
            if gp.op == 'RENAME':
                backend.unlink(gp.oldpath)
            if not first_hunk:
                # metadata-only entry: handle it without a file patcher
                if gp.op == 'DELETE':
                    backend.unlink(gp.path)
                    continue
                data, mode = None, None
                if gp.op in ('RENAME', 'COPY'):
                    data, mode = store.getfile(gp.oldpath)[:2]
                    # FIXME: failing getfile has never been handled here
                    assert data is not None
                if gp.mode:
                    mode = gp.mode
                    if gp.op == 'ADD':
                        # Added files without content have no hunk and
                        # must be created
                        data = ''
                if data or mode:
                    if (gp.op in ('ADD', 'RENAME', 'COPY')
                        and backend.exists(gp.path)):
                        raise PatchError(_("cannot create %s: destination "
                                           "already exists") % gp.path)
                    backend.setfile(gp.path, data, mode, gp.oldpath)
                continue
            try:
                current_file = patcher(ui, gp, backend, store,
                                       eolmode=eolmode)
            except PatchError, inst:
                ui.warn(str(inst) + '\n')
                current_file = None
                rejects += 1
                continue
        elif state == 'git':
            # stash pre-images of copy/rename sources before later hunks
            # modify them
            for gp in values:
                path = pstrip(gp.oldpath)
                data, mode = backend.getfile(path)
                if data is None:
                    # The error ignored here will trigger a getfile()
                    # error in a place more appropriate for error
                    # handling, and will not interrupt the patching
                    # process.
                    pass
                else:
                    store.setfile(path, data, mode)
        else:
            raise util.Abort(_('unsupported parser state: %s') % state)

    if current_file:
        rejects += current_file.close()

    if rejects:
        return -1
    return err
1857 1862
def _externalpatch(ui, repo, patcher, patchname, strip, files,
                   similarity):
    """use <patcher> to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor.

    Touched paths are added to 'files' and marked in the dirstate;
    raises PatchError when the external command exits non-zero.
    """

    fuzz = False
    args = []
    cwd = repo.root
    if cwd:
        args.append('-d %s' % util.shellquote(cwd))
    fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
                                         util.shellquote(patchname)))
    # initialized up front so that a fuzz/reject line appearing before
    # any 'patching file' line cannot trigger a NameError below
    pf = ''
    printed_file = False
    try:
        for line in fp:
            line = line.rstrip()
            ui.note(line + '\n')
            if line.startswith('patching file '):
                pf = util.parsepatchoutput(line)
                printed_file = False
                files.add(pf)
            elif line.find('with fuzz') >= 0:
                fuzz = True
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
            elif line.find('saving rejects to file') >= 0:
                ui.warn(line + '\n')
            elif line.find('FAILED') >= 0:
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
    finally:
        if files:
            scmutil.marktouched(repo, files, similarity)
    code = fp.close()
    if code:
        raise PatchError(_("patch command failed: %s") %
                         util.explainexit(code)[0])
    return fuzz
1899 1904
def patchbackend(ui, backend, patchobj, strip, prefix, files=None,
                 eolmode='strict'):
    """drive applydiff() over 'patchobj' (a path or an open file-like
    object) through 'backend'; returns True when the patch applied with
    fuzz"""
    files = set() if files is None else files
    if eolmode is None:
        eolmode = ui.config('patch', 'eol', 'strict')
    if eolmode.lower() not in eolmodes:
        raise util.Abort(_('unsupported line endings type: %s') % eolmode)
    eolmode = eolmode.lower()

    store = filestore()
    try:
        patchfp = open(patchobj, 'rb')
    except TypeError:
        # not a filename: assume an already-open file-like object
        patchfp = patchobj
    try:
        status = applydiff(ui, patchfp, backend, store, strip=strip,
                           prefix=prefix, eolmode=eolmode)
    finally:
        if patchfp != patchobj:
            patchfp.close()
        files.update(backend.close())
        store.close()
    if status < 0:
        raise PatchError(_('patch failed to apply'))
    return status > 0
1926 1931
def internalpatch(ui, repo, patchobj, strip, prefix='', files=None,
                  eolmode='strict', similarity=0):
    """use builtin patch to apply <patchobj> to the working directory.
    returns whether patch was applied with fuzz factor."""
    # patch straight into the working directory via workingbackend
    return patchbackend(ui, workingbackend(ui, repo, similarity), patchobj,
                        strip, prefix, files, eolmode)
1933 1938
def patchrepo(ui, repo, ctx, store, patchobj, strip, prefix, files=None,
              eolmode='strict'):
    """apply <patchobj> through a repobackend built from 'ctx' and
    'store'; returns whether the patch applied with fuzz factor."""
    return patchbackend(ui, repobackend(ui, repo, ctx, store), patchobj,
                        strip, prefix, files, eolmode)
1938 1943
def patch(ui, repo, patchname, strip=1, prefix='', files=None, eolmode='strict',
          similarity=0):
    """Apply <patchname> to the working directory.

    'eolmode' specifies how end of lines should be handled. It can be:
    - 'strict': inputs are read in binary mode, EOLs are preserved
    - 'crlf': EOLs are ignored when patching and reset to CRLF
    - 'lf': EOLs are ignored when patching and reset to LF
    - None: get it from user settings, default to 'strict'
    'eolmode' is ignored when using an external patcher program.

    Returns whether patch was applied with fuzz factor.
    """
    if files is None:
        files = set()
    # an explicitly configured external patch program wins over the
    # builtin patcher
    patcher = ui.config('ui', 'patch')
    if patcher:
        return _externalpatch(ui, repo, patcher, patchname, strip,
                              files, similarity)
    return internalpatch(ui, repo, patchname, strip, prefix, files, eolmode,
                         similarity)
1960 1965
def changedfiles(ui, repo, patchpath, strip=1):
    '''return the set of repository paths touched by the patch at
    patchpath, without applying it'''
    backend = fsbackend(ui, repo.root)
    def pstrip(p):
        return pathtransform(p, strip - 1, '')[1]
    fp = open(patchpath, 'rb')
    try:
        touched = set()
        for state, values in iterhunks(fp):
            if state == 'file':
                afile, bfile, first_hunk, gp = values
                if gp:
                    gp.path = pstrip(gp.path)
                    if gp.oldpath:
                        gp.oldpath = pstrip(gp.oldpath)
                else:
                    gp = makepatchmeta(backend, afile, bfile, first_hunk,
                                       strip, '')
                touched.add(gp.path)
                # a rename touches both the old and the new name
                if gp.op == 'RENAME':
                    touched.add(gp.oldpath)
            elif state not in ('hunk', 'git'):
                raise util.Abort(_('unsupported parser state: %s') % state)
        return touched
    finally:
        fp.close()
1984 1989
class GitDiffRequired(Exception):
    '''raised when a change cannot be represented in plain diff format
    and the diff must be regenerated in git format'''
1987 1992
def diffallopts(ui, opts=None, untrusted=False, section='diff'):
    '''return diffopts with all features supported and parsed'''
    return difffeatureopts(ui, opts=opts, untrusted=untrusted,
                           section=section, git=True, whitespace=True,
                           formatchanging=True)

# backwards-compatible alias
diffopts = diffallopts
1994 1999
def difffeatureopts(ui, opts=None, untrusted=False, section='diff', git=False,
                    whitespace=False, formatchanging=False):
    '''return diffopts with only opted-in features parsed

    Features:
    - git: git-style diffs
    - whitespace: whitespace options like ignoreblanklines and ignorews
    - formatchanging: options that will likely break or cause correctness issues
      with most diff parsers
    '''
    def fetch(key, name=None, getter=ui.configbool, forceplain=None):
        # command-line options win over config; forceplain is returned
        # instead of the config value when the ui is in plain mode
        if opts:
            v = opts.get(key)
            if v:
                return v
        if forceplain is not None and ui.plain():
            return forceplain
        return getter(section, name or key, None, untrusted=untrusted)

    # core options, expected to be understood by every diff parser
    parsed = {
        'nodates': fetch('nodates'),
        'showfunc': fetch('show_function', 'showfunc'),
        'context': fetch('unified', getter=ui.config),
    }

    if git:
        parsed['git'] = fetch('git')
    if whitespace:
        parsed['ignorews'] = fetch('ignore_all_space', 'ignorews')
        parsed['ignorewsamount'] = fetch('ignore_space_change',
                                         'ignorewsamount')
        parsed['ignoreblanklines'] = fetch('ignore_blank_lines',
                                           'ignoreblanklines')
    if formatchanging:
        parsed['text'] = opts and opts.get('text')
        parsed['nobinary'] = fetch('nobinary')
        parsed['noprefix'] = fetch('noprefix', forceplain=False)

    return mdiff.diffopts(**parsed)
2035 2040
def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None,
         losedatafn=None, prefix=''):
    '''yields diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.

    losedatafn(**kwarg) is a callable run when opts.upgrade=True and
    every time some change cannot be represented with the current
    patch format. Return False to upgrade to git patch format, True to
    accept the loss or raise an exception to abort the diff. It is
    called with the name of current file being diffed as 'fn'. If set
    to None, patches will always be upgraded to git format when
    necessary.

    prefix is a filename prefix that is prepended to all filenames on
    display (used for subrepos).
    '''

    if opts is None:
        opts = mdiff.defaultopts

    if not node1 and not node2:
        node1 = repo.dirstate.p1()

    def lrugetfilectx():
        # small (20-entry) LRU cache of filelogs, keyed by file name
        cache = {}
        order = util.deque()
        def getfilectx(f, ctx):
            fctx = ctx.filectx(f, filelog=cache.get(f))
            if f not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[f] = fctx.filelog()
            else:
                # move f to the most-recently-used end
                order.remove(f)
            order.append(f)
            return fctx
        return getfilectx
    getfilectx = lrugetfilectx()

    ctx1 = repo[node1]
    ctx2 = repo[node2]

    if not changes:
        changes = repo.status(ctx1, ctx2, match=match)
    modified, added, removed = changes[:3]

    if not modified and not added and not removed:
        return []

    if repo.ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]

    # copy/rename information is only needed for git diffs or when we
    # might have to upgrade to one
    copy = {}
    if opts.git or opts.upgrade:
        copy = copies.pathcopies(ctx1, ctx2)

    def difffn(opts, losedata):
        return trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
                       copy, getfilectx, opts, losedata, prefix)
    if opts.upgrade and not opts.git:
        try:
            def losedata(fn):
                if not losedatafn or not losedatafn(fn=fn):
                    raise GitDiffRequired
            # Buffer the whole output until we are sure it can be generated
            return list(difffn(opts.copy(git=False), losedata))
        except GitDiffRequired:
            # some change could not be represented: redo in git format
            return difffn(opts.copy(git=True), None)
    else:
        return difffn(opts, None)
2112 2117
def difflabel(func, *args, **kw):
    '''yields 2-tuples of (output, label) based on the output of func()'''
    headprefixes = [('diff', 'diff.diffline'),
                    ('copy', 'diff.extended'),
                    ('rename', 'diff.extended'),
                    ('old', 'diff.extended'),
                    ('new', 'diff.extended'),
                    ('deleted', 'diff.extended'),
                    ('---', 'diff.file_a'),
                    ('+++', 'diff.file_b')]
    textprefixes = [('@', 'diff.hunk'),
                    ('-', 'diff.deleted'),
                    ('+', 'diff.inserted')]
    # 'head' is True while we are inside a file header block (between a
    # non-diff-looking line and the next '@' hunk line)
    head = False
    for chunk in func(*args, **kw):
        lines = chunk.split('\n')
        for i, line in enumerate(lines):
            if i != 0:
                yield ('\n', '')
            if head:
                if line.startswith('@'):
                    head = False
            else:
                if line and line[0] not in ' +-@\\':
                    head = True
            stripline = line
            diffline = False
            if not head and line and line[0] in '+-':
                # highlight tabs and trailing whitespace, but only in
                # changed lines
                stripline = line.rstrip()
                diffline = True

            prefixes = textprefixes
            if head:
                prefixes = headprefixes
            for prefix, label in prefixes:
                if stripline.startswith(prefix):
                    if diffline:
                        # give tabs their own label so they can be
                        # colored separately
                        for token in tabsplitter.findall(stripline):
                            if '\t' == token[0]:
                                yield (token, 'diff.tab')
                            else:
                                yield (token, label)
                    else:
                        yield (stripline, label)
                    break
            else:
                yield (line, '')
            # emit the stripped trailing whitespace with its own label
            if line != stripline:
                yield (line[len(stripline):], 'diff.trailingwhitespace')
2164 2169
def diffui(*args, **kw):
    '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
    # all arguments are forwarded to diff() unchanged
    return difflabel(diff, *args, **kw)
2168 2173
def _filepairs(ctx1, modified, added, removed, copy, opts):
    '''generates tuples (f1, f2, copyop), where f1 is the name of the file
    before and f2 is the name after. For added files, f1 will be None,
    and for removed files, f2 will be None. copyop may be set to None, 'copy'
    or 'rename' (the latter two only if opts.git is set).'''
    # files already reported as the source of a rename
    gone = set()

    # reverse of the 'copy' map: destination -> source
    copyto = dict([(v, k) for k, v in copy.items()])

    addedset, removedset = set(added), set(removed)
    # Fix up added, since merged-in additions appear as
    # modifications during merges
    for f in modified:
        if f not in ctx1:
            addedset.add(f)

    for f in sorted(modified + added + removed):
        copyop = None
        f1, f2 = f, f
        if f in addedset:
            f1 = None
            if f in copy:
                if opts.git:
                    f1 = copy[f]
                    # a copy whose source was removed is a rename
                    if f1 in removedset and f1 not in gone:
                        copyop = 'rename'
                        gone.add(f1)
                    else:
                        copyop = 'copy'
        elif f in removedset:
            f2 = None
            if opts.git:
                # have we already reported a copy above?
                if (f in copyto and copyto[f] in addedset
                    and copy[copyto[f]] == f):
                    continue
        yield f1, f2, copyop
2206 2211
def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
        copy, getfilectx, opts, losedatafn, prefix):
    '''generate the text of a diff between ctx1 and ctx2, yielded in blocks

    For each file pair produced by _filepairs(), yields the header block
    (one string, possibly several header lines joined by newlines) followed
    by the hunk/binary text, when non-empty.

    If losedatafn is set and opts.git is not, losedatafn(f) is called for
    every file whose change cannot be represented in the plain (non-git)
    diff format: binary files, copies/renames, empty file creation or
    deletion, and flag (mode) changes.
    '''

    def gitindex(text):
        # abbreviated blob hash for the git 'index' header line, computed
        # the same way git hashes a blob: sha1('blob <len>\0' + data)
        if not text:
            text = ""
        l = len(text)
        s = util.sha1('blob %d\0' % l)
        s.update(text)
        return s.hexdigest()

    if opts.noprefix:
        aprefix = bprefix = ''
    else:
        aprefix = 'a/'
        bprefix = 'b/'

    def diffline(f, revs):
        # header line used by the traditional (non-git) diff format
        revinfo = ' '.join(["-r %s" % rev for rev in revs])
        return 'diff %s %s' % (revinfo, f)

    date1 = util.datestr(ctx1.date())
    date2 = util.datestr(ctx2.date())

    # git file modes for symlink ('l'), executable ('x') and regular files
    gitmode = {'l': '120000', 'x': '100755', '': '100644'}

    for f1, f2, copyop in _filepairs(
        ctx1, modified, added, removed, copy, opts):
        # f1/f2 may be None (added/removed file); only fetch content and
        # flags for the side that exists
        content1 = None
        content2 = None
        flag1 = None
        flag2 = None
        if f1:
            content1 = getfilectx(f1, ctx1).data()
            if opts.git or losedatafn:
                flag1 = ctx1.flags(f1)
        if f2:
            content2 = getfilectx(f2, ctx2).data()
            if opts.git or losedatafn:
                flag2 = ctx2.flags(f2)
        binary = False
        if opts.git or losedatafn:
            binary = util.binary(content1) or util.binary(content2)

        # report changes the plain diff format cannot express
        if losedatafn and not opts.git:
            if (binary or
                # copy/rename
                f2 in copy or
                # empty file creation
                (not f1 and not content2) or
                # empty file deletion
                (not content1 and not f2) or
                # create with flags
                (not f1 and flag2) or
                # change flags
                (f1 and f2 and flag1 != flag2)):
                losedatafn(f2 or f1)

        path1 = posixpath.join(prefix, f1 or f2)
        path2 = posixpath.join(prefix, f2 or f1)
        header = []
        if opts.git:
            header.append('diff --git %s%s %s%s' %
                          (aprefix, path1, bprefix, path2))
            if not f1: # added
                header.append('new file mode %s' % gitmode[flag2])
            elif not f2: # removed
                header.append('deleted file mode %s' % gitmode[flag1])
            else: # modified/copied/renamed
                mode1, mode2 = gitmode[flag1], gitmode[flag2]
                if mode1 != mode2:
                    header.append('old mode %s' % mode1)
                    header.append('new mode %s' % mode2)
                if copyop is not None:
                    header.append('%s from %s' % (copyop, path1))
                    header.append('%s to %s' % (copyop, path2))
        elif revs and not repo.ui.quiet:
            header.append(diffline(path1, revs))

        if binary and opts.git and not opts.nobinary:
            # base85-encoded binary diff; the index line is only emitted
            # when there is an actual payload
            text = mdiff.b85diff(content1, content2)
            if text:
                header.append('index %s..%s' %
                              (gitindex(content1), gitindex(content2)))
        else:
            text = mdiff.unidiff(content1, date1,
                                 content2, date2,
                                 path1, path2, opts=opts)
        # a lone 'diff ...' header with no hunks is suppressed
        if header and (text or len(header) > 1):
            yield '\n'.join(header) + '\n'
        if text:
            yield text
2299 2304
def diffstatsum(stats):
    '''reduce (filename, adds, removes, isbinary) tuples to a summary

    Returns (maxfilewidth, maxchanges, totaladds, totalremoves, anybinary).
    '''
    maxfile = 0
    maxtotal = 0
    addtotal = 0
    removetotal = 0
    binary = False
    for filename, adds, removes, isbinary in stats:
        namewidth = encoding.colwidth(filename)
        if namewidth > maxfile:
            maxfile = namewidth
        changes = adds + removes
        if changes > maxtotal:
            maxtotal = changes
        addtotal += adds
        removetotal += removes
        if isbinary:
            binary = True

    return maxfile, maxtotal, addtotal, removetotal, binary
2310 2315
def diffstatdata(lines):
    '''parse a sequence of diff output lines into per-file statistics

    Returns a list of (filename, adds, removes, isbinary) tuples, one per
    file appearing in the diff, in order of appearance.
    '''
    # header of the non-git diff format: "diff -r REV [-r REV] filename"
    # (raw string: the pattern contains the regex escape \s)
    diffre = re.compile(r'^diff .*-r [a-z0-9]+\s(.*)$')

    results = []
    filename, adds, removes, isbinary = None, 0, 0, False

    def addresult():
        # flush the counters of the current file, if any
        if filename:
            results.append((filename, adds, removes, isbinary))

    for line in lines:
        if line.startswith('diff'):
            addresult()
            # set numbers to 0 anyway when starting new file
            adds, removes, isbinary = 0, 0, False
            if line.startswith('diff --git a/'):
                filename = gitre.search(line).group(2)
            elif line.startswith('diff -r'):
                # format: "diff -r ... -r ... filename"
                filename = diffre.search(line).group(1)
        elif line.startswith('+') and not line.startswith('+++ '):
            adds += 1
        elif line.startswith('-') and not line.startswith('--- '):
            removes += 1
        elif (line.startswith('GIT binary patch') or
              line.startswith('Binary file')):
            isbinary = True
    # flush the last file
    addresult()
    return results
2340 2345
def diffstat(lines, width=80, git=False):
    '''render a diffstat histogram for the given diff output lines

    width bounds the total width of each output line; the git argument is
    accepted for interface compatibility.
    '''
    stats = diffstatdata(lines)
    maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats)

    # the count column must fit 'Bin' when binary files are present
    countwidth = len(str(maxtotal))
    if hasbinary:
        countwidth = max(countwidth, 3)
    graphwidth = max(width - countwidth - maxname - 6, 10)

    def scale(i):
        if maxtotal <= graphwidth:
            return i
        # If diffstat runs out of room it doesn't print anything,
        # which isn't very useful, so always print at least one + or -
        # if there were at least some changes.
        return max(i * graphwidth // maxtotal, int(bool(i)))

    output = []
    for filename, adds, removes, isbinary in stats:
        count = 'Bin' if isbinary else adds + removes
        pluses = '+' * scale(adds)
        minuses = '-' * scale(removes)
        padding = ' ' * (maxname - encoding.colwidth(filename))
        output.append(' %s%s | %*s %s%s\n'
                      % (filename, padding, countwidth, count,
                         pluses, minuses))

    if stats:
        output.append(_(' %d files changed, %d insertions(+), '
                        '%d deletions(-)\n')
                      % (len(stats), totaladds, totalremoves))

    return ''.join(output)
2378 2383
def diffstatui(*args, **kw):
    '''like diffstat(), but yields 2-tuples of (output, label) for
    ui.write()
    '''

    for line in diffstat(*args, **kw).splitlines():
        if not line or line[-1] not in '+-':
            # summary line or a file line without a graph: no labels
            yield (line, '')
        else:
            name, graph = line.rsplit(' ', 1)
            yield (name + ' ', '')
            inserted = re.search(r'\++', graph)
            if inserted:
                yield (inserted.group(0), 'diffstat.inserted')
            deleted = re.search(r'-+', graph)
            if deleted:
                yield (deleted.group(0), 'diffstat.deleted')
        yield ('\n', '')
General Comments 0
You need to be logged in to leave comments. Login now