##// END OF EJS Templates
record: add an operation argument to customize the recording ui...
Laurent Charignon -
r25310:c1f5ef76 default
parent child Browse files
Show More
@@ -1,3345 +1,3351 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile, cStringIO, shutil
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import context, repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import crecord as crecordmod
18 18 import lock as lockmod
19 19
def ishunk(x):
    """Return True if x is a hunk object (curses ui hunk or plain
    record hunk)."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
23 23
def newandmodified(chunks, originalchunks):
    """Return the set of filenames of hunks that create a new file and
    were modified by the user (i.e. are absent from originalchunks)."""
    return set(c.header.filename() for c in chunks
               if ishunk(c) and c.header.isnewfile()
               and c not in originalchunks)
31 31
def parsealiases(cmd):
    """Split a command-table key into its list of aliases, dropping the
    leading '^' marker used to flag commands shown in short help."""
    names = cmd.lstrip("^")
    return names.split("|")
34 34
def setupwrapcolorwrite(ui):
    """Wrap ui.write so diff output is routed through patch.difflabel
    for labeling/colorization.

    Returns the original (unwrapped) write method so the caller can
    restore it afterwards.
    """
    oldwrite = ui.write

    def wrapped(*args, **kwargs):
        label = kwargs.pop('label', '')
        for chunk, l in patch.difflabel(lambda: args):
            oldwrite(chunk, label=label + l)

    ui.write = wrapped
    return oldwrite
47 47
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Interactively filter originalhunks and return the selection.

    Uses the curses chunk selector when usecurses is set (optionally
    driven by a test script in testfile), and the plain-text prompter
    otherwise.  *operation* names the action being performed (commit,
    revert, shelve, ...) for ui purposes.
    """
    if not usecurses:
        return patch.filterpatch(ui, originalhunks, operation)

    if testfile:
        recordfn = crecordmod.testdecorator(testfile,
                                            crecordmod.testchunkselector)
    else:
        recordfn = crecordmod.chunkselector
    return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
def recordfilter(ui, originalhunks, operation=None):
    """Prompt the user to filter originalhunks and return the list of
    selected hunks.

    *operation* is used for ui purposes to tell the user what kind of
    filtering they are doing: reverting, committing, shelving, etc.
    """
    usecurses = ui.configbool('experimental', 'crecord', False)
    testfile = ui.config('experimental', 'crecordtest', None)
    origwrite = setupwrapcolorwrite(ui)
    try:
        return filterchunks(ui, originalhunks, usecurses, testfile,
                            operation)
    finally:
        # always restore the unwrapped write method
        ui.write = origwrite
70 76
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Drive an interactive (record-style) commit.

    Builds a diff of the working directory, lets the user pick hunks via
    *filterfn*, rewrites the working directory to contain only the
    selected changes, delegates the actual commit to *commitfunc*, and
    finally restores the unselected changes from backups.

    *cmdsuggest* is the command name suggested to non-interactive users;
    *backupall* forces backing up every changed file instead of only the
    ones that need it.
    """
    # deferred import — presumably avoids an import cycle at module load
    import merge as mergemod

    if not ui.interactive():
        raise util.Abort(_('running non-interactively, use %s instead') %
                         cmdsuggest)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        merge = len(repo[None].parents()) > 1
        if merge:
            raise util.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        status = repo.status(match=match)
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, so we have intending-to apply subset of it
        try:
            chunks = filterfn(ui, originalchunks)
        except patch.PatchError, err:
            raise util.Abort(_('error parsing patch: %s') % err)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                # headers know their files; bare hunks don't
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                    newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError, err:
                # an existing backup dir from a previous run is fine
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                shutil.copystat(repo.wjoin(f), tmpname)
                backups[f] = tmpname

            # serialize the selected hunks into an in-memory patch
            fp = cStringIO.StringIO()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # newly-added-and-edited files must disappear before revert
            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                # Equivalent to hg.revert
                choices = lambda key: key in backups
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, choices)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError, err:
                    raise util.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files (best effort: OSError
            #    during restore is deliberately swallowed)
            try:
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))
                    util.copyfile(tmpname, repo.wjoin(realname))
                    # Our calls to copystat() here and above are a
                    # hack to trick any editors that have f open that
                    # we haven't modified them.
                    #
                    # Also note that this racy as an editor could
                    # notice the file's mtime before we've finished
                    # writing it.
                    shutil.copystat(tmpname, repo.wjoin(realname))
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                pass

    return commit(ui, repo, recordfunc, pats, opts)
216 222
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    # an exact table key short-circuits, so "log" beats "^log|history"
    if cmd in table:
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for key in keys:
        aliases = parsealiases(key)
        allcmds.extend(aliases)
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            # accept an unambiguous prefix of any alias
            for alias in aliases:
                if alias.startswith(cmd):
                    found = alias
                    break
        if found is None:
            continue
        isdebug = (aliases[0].startswith("debug")
                   or found.startswith("debug"))
        bucket = debugchoice if isdebug else choice
        bucket[found] = (aliases, table[key])

    # fall back to debug commands only when nothing normal matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
254 260
def findcmd(cmd, table, strict=True):
    """Look up *cmd* in *table* and return (aliases, command table entry).

    Raises error.AmbiguousCommand when several commands match, and
    error.UnknownCommand when none does.
    """
    choice, allcmds = findpossible(cmd, table, strict)

    # an exact match always wins
    if cmd in choice:
        return choice[cmd]

    nmatches = len(choice)
    if nmatches > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice.keys()))
    if nmatches == 1:
        return list(choice.values())[0]
    raise error.UnknownCommand(cmd, allcmds)
271 277
def findrepo(p):
    """Walk upward from directory *p* looking for a '.hg' directory.

    Returns the repository root, or None when no repository contains p.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # reached the filesystem root without finding a repo
            return None
        p = parent
    return p
279 285
def bailifchanged(repo, merge=True):
    """Abort when the working directory or any subrepo has uncommitted
    changes (or, when *merge* is True, an uncommitted merge)."""
    if merge and repo.dirstate.p2() != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    # first four status fields: modified, added, removed, deleted
    if any(repo.status()[:4]):
        raise util.Abort(_('uncommitted changes'))
    wctx = repo[None]
    for subname in sorted(wctx.substate):
        wctx.sub(subname).bailifchanged()
289 295
def logmessage(ui, opts):
    """Return the commit message selected by -m/--message or -l/--logfile.

    The two options are mutually exclusive; a logfile of '-' reads the
    message from stdin.  Returns None when neither option is given.
    Raises util.Abort on conflicting options or an unreadable logfile.
    """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                message = ui.fin.read()
            else:
                # normalize line endings while reading the file
                message = '\n'.join(util.readfile(logfile).splitlines())
        # 'as' form works on Python 2.6+ and 3.x; the old 'except E, e'
        # comma form is a syntax error under Python 3
        except IOError as inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message
308 314
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # a changectx: more than one parent means a merge
        ismerge = len(ctxorbool.parents()) > 1
    return baseformname + (".merge" if ismerge else ".normal")
325 331
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is a extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        def forcing(r, c, s):
            return commitforceeditor(r, c, s,
                                     finishdesc=finishdesc,
                                     extramsg=extramsg,
                                     editform=editform)
        return forcing
    if editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    return commiteditor
356 362
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if not limit:
        # absent, empty or zero limit means "no limit"
        return None
    try:
        limit = int(limit)
    except ValueError:
        raise util.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise util.Abort(_('limit must be positive'))
    return limit
370 376
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand the output-filename pattern *pat*.

    Supported format specs: '%%' literal percent, '%b' basename of the
    repo root, and — only when the corresponding argument is supplied —
    '%H'/'%h'/'%R'/'%r' (node hashes/revisions), '%m' (sanitized desc),
    '%N'/'%n' (total / zero-padded seqno), '%s'/'%d'/'%p' (pathname
    basename / dirname / full path).

    Raises util.Abort on an unknown format spec.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        'm': lambda: re.sub('[^\w]', '_', str(desc))
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
            # '%r': local revision number padded to revwidth
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # pad the sequence number to the width of the total
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = expander[pat[i]]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    # 'as' form works on Python 2.6+ and 3.x; the old 'except E, e'
    # comma form is a syntax error under Python 3
    except KeyError as inst:
        raise util.Abort(_("invalid format spec '%%%s' in output filename") %
                         inst.args[0])
416 422
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Return a file-like object for the output pattern *pat*.

    An empty pattern or '-' selects the ui's stdout/stdin (depending on
    *mode*); a *pat* that is already an open file object is returned
    unchanged; otherwise the pattern is expanded via makefilename() and
    opened.  *modemap* lets several revisions share one file: the first
    open of a name uses 'wb', subsequent ones 'ab'.
    """
    writable = mode not in ('r', 'rb')

    if not pat or pat == '-':
        fp = repo.ui.fout if writable else repo.ui.fin
        if util.safehasattr(fp, 'fileno'):
            # duplicate the descriptor so closing ours leaves ui's open
            return os.fdopen(os.dup(fp.fileno()), mode)

        # if this fp can't be duped properly, return
        # a dummy object that can be closed
        class wrappedfileobj(object):
            noop = lambda x: None

            def __init__(self, f):
                self.f = f

            def __getattr__(self, attr):
                if attr == 'close':
                    return self.noop
                return getattr(self.f, attr)

        return wrappedfileobj(fp)

    if util.safehasattr(pat, 'write') and writable:
        return pat
    if util.safehasattr(pat, 'read') and 'r' in mode:
        return pat

    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            modemap[fn] = 'ab'
    return open(fn, mode)
454 460
def openrevlog(repo, cmd, file_, opts):
    """Open and return the changelog, manifest, a filelog or a raw
    revlog, as selected by the --changelog/--manifest/--dir options.

    Without a matching in-repo revlog, falls back to opening *file_*
    directly as a revlog index.  Raises util.Abort on conflicting
    options and error.CommandError when no target can be determined.
    """
    cl = opts['changelog']
    mf = opts['manifest']
    dirflag = opts['dir']

    # option sanity checking
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dirflag:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise util.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dirflag:
            if 'treemanifest' not in repo.requirements:
                raise util.Abort(_("--dir can only be used on repos with "
                                   "treemanifest enabled"))
            dirlog = repo.dirlog(file_)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifest
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if r:
        return r

    # no in-repo revlog found: open file_ as a plain revlog index
    if not file_:
        raise error.CommandError(cmd, _('invalid arguments'))
    if not os.path.isfile(file_):
        raise util.Abort(_("revlog '%s' not found") % file_)
    return revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
                         file_[:-2] + ".i")
499 505
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, move) the files matched by *pats* to
    the destination given as the last element of *pats*.

    Returns True when at least one copy failed, so callers can use the
    result as a command exit status.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # expand one source pattern into (abs, rel, exact) tuples,
        # warning about unmanaged/removed files that were named exactly
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(repo[None], [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                # case-only rename on a case-insensitive filesystem
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # rename via a temporary name to change only the case
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            # NOTE: srcexists is only bound on the copy path above, but
            # 'not after' guarantees that path was taken when it is read
            if not after and srcexists and not samefile:
                util.unlinkpath(repo.wjoin(abssrc))
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many sources already exist under dest
                    # with this prefix stripped
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise util.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise util.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
727 733
def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
            runargs=None, appendpid=False):
    '''Run a command as a service.

    Handles the parent/child daemonization dance: the parent re-spawns
    itself detached (signalled ready by removal of a temporary lock
    file), while the child redirects its stdio to *logfile* (or
    /dev/null) and finally calls *runfn*.  *parentfn*/*initfn* are
    optional hooks for the parent and for pre-run initialization;
    *appendpid* appends to (instead of overwriting) the pid file.
    '''

    def writepid(pid):
        # record the service pid so it can be signalled later
        if opts['pid_file']:
            if appendpid:
                mode = 'a'
            else:
                mode = 'w'
            fp = open(opts['pid_file'], mode)
            fp.write(str(pid) + '\n')
            fp.close()

    if opts['daemon'] and not opts['daemon_pipefds']:
        # Signal child process startup with file removal
        lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
        os.close(lockfd)
        try:
            if not runargs:
                runargs = util.hgcmd() + sys.argv[1:]
            runargs.append('--daemon-pipefds=%s' % lockpath)
            # Don't pass --cwd to the child process, because we've already
            # changed directory.
            for i in xrange(1, len(runargs)):
                if runargs[i].startswith('--cwd='):
                    del runargs[i]
                    break
                elif runargs[i].startswith('--cwd'):
                    # '--cwd VALUE' occupies two argv slots
                    del runargs[i:i + 2]
                    break
            def condfn():
                return not os.path.exists(lockpath)
            pid = util.rundetached(runargs, condfn)
            if pid < 0:
                raise util.Abort(_('child process failed to start'))
            writepid(pid)
        finally:
            try:
                os.unlink(lockpath)
            except OSError, e:
                # the child may already have removed it to signal startup
                if e.errno != errno.ENOENT:
                    raise
        if parentfn:
            return parentfn(pid)
        else:
            return

    if initfn:
        initfn()

    if not opts['daemon']:
        writepid(os.getpid())

    if opts['daemon_pipefds']:
        # we are the detached child: detach from the tty and redirect
        # stdio to the logfile (or /dev/null)
        lockpath = opts['daemon_pipefds']
        try:
            os.setsid()
        except AttributeError:
            pass
        # removing the lock file tells the parent we started successfully
        os.unlink(lockpath)
        util.hidewindow()
        sys.stdout.flush()
        sys.stderr.flush()

        nullfd = os.open(os.devnull, os.O_RDWR)
        logfilefd = nullfd
        if logfile:
            logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
        os.dup2(nullfd, 0)
        os.dup2(logfilefd, 1)
        os.dup2(logfilefd, 2)
        if nullfd not in (0, 1, 2):
            os.close(nullfd)
        if logfile and logfilefd not in (0, 1, 2):
            os.close(logfilefd)

    if runfn:
        return runfn()
807 813
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it a simple code translation from
    the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
        (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
        updatefunc(<repo>, <node>)

    Returns a (msg, node, rejects) tuple; node is None when nothing was
    committed, rejects is True when a partial apply left reject files.
    """
    tmpname, message, user, date, branch, nodeid, p1, p2 = \
        patch.extract(ui, hunk)

    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)
    if not tmpname:
        # patch.extract found no actual patch content
        return (None, None, False)
    msg = _('applied to working directory')

    rejects = False
    dsguard = None

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            if not nodeid or not p1:
                raise util.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                # the recorded parents are unknown locally
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            # apply to the working directory, guarded so the dirstate is
            # rolled back on failure
            dsguard = dirstateguard(repo, 'tryimportone')
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or opts.get('import_branch'):
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except patch.PatchError, e:
                if not partial:
                    raise util.Abort(str(e))
                if partial:
                    # --partial: keep going and record the rejects
                    rejects = True

            files = list(files)
            if opts.get('no_commit'):
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform, **opts)
                allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
                try:
                    if partial:
                        # a fully-rejected partial apply may commit nothing
                        repo.ui.setconfig('ui', 'allowemptycommit', True)
                    n = repo.commit(message, opts.get('user') or user,
                                    opts.get('date') or date, match=m,
                                    editor=editor)
                finally:
                    repo.ui.restoreconfig(allowemptyback)
            dsguard.close()
        else:
            # --bypass: build the changeset in memory without touching
            # the working directory
            if opts.get('exact') or opts.get('import_branch'):
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except patch.PatchError, e:
                    raise util.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.makememctx(repo, (p1.node(), p2.node()),
                                            message,
                                            opts.get('user') or user,
                                            opts.get('date') or date,
                                            branch, files, store,
                                            editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and opts.get('no_commit'):
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise util.Abort(_('patch is damaged or loses information'))
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        lockmod.release(dsguard)
        os.unlink(tmpname)
965 971
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.

    repo -- repository to export from
    revs -- revisions to export (an empty sequence is a no-op)
    template -- filename template used to open one output file per
                changeset when no file object is given
    fp -- optional file object; when set, every patch is written to it
    switch_parent -- diff merges against the second parent instead
    opts -- diff options forwarded to patch.diffui
    '''

    if not revs:
        # Nothing to export.  Bail out early: without this guard the
        # max() below would raise ValueError on an empty sequence even
        # though the export loop itself would have been a no-op.
        return
    total = len(revs)
    revwidth = max([len(str(rev)) for rev in revs])
    filemode = {}

    def single(rev, seqno, fp):
        # Emit one changeset as a patch; opens a per-changeset file from
        # 'template' when no file object was provided.
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()

        if parents:
            prev = parents[0]
        else:
            prev = nullid

        shouldclose = False
        if not fp and len(template) > 0:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fp = makefileobj(repo, template, node, desc=desc, total=total,
                             seqno=seqno, revwidth=revwidth, mode='wb',
                             modemap=filemode)
            if fp != template:
                # makefileobj handed us a real file object we own; close
                # it when this changeset is done.
                shouldclose = True
        if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        if not fp:
            write = repo.ui.write
        else:
            def write(s, **kw):
                fp.write(s)

        write("# HG changeset patch\n")
        write("# User %s\n" % ctx.user())
        write("# Date %d %d\n" % ctx.date())
        write("# %s\n" % util.datestr(ctx.date()))
        if branch and branch != 'default':
            write("# Branch %s\n" % branch)
        write("# Node ID %s\n" % hex(node))
        write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            write("# Parent %s\n" % hex(parents[1]))
        write(ctx.description().rstrip())
        write("\n\n")

        for chunk, label in patch.diffui(repo, prev, node, opts=opts):
            write(chunk, label=label)

        if shouldclose:
            fp.close()

    for seqno, rev in enumerate(revs):
        single(rev, seqno + 1, fp)
1026 1032
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False):
    '''show diff or diffstat.

    Writes either a full diff (stat=False) or a diffstat summary
    (stat=True) between node1 and node2 to fp, or to the ui when fp is
    None.  root restricts output to paths under that directory;
    listsubrepos recurses into subrepositories.
    '''
    if fp is None:
        write = ui.write
    else:
        # route labeled output into the file object, dropping labels
        def write(s, **kw):
            fp.write(s)

    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        # diffstat does not need context lines; drop them for speed
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix, relroot=relroot)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width,
                                             git=diffopts.git):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix,
                                         relroot=relroot):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.narrowmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1084 1090
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        # per-rev output buffers, only populated in buffered mode
        self.header = {}
        self.hunk = {}
        self.lastheader = None
        self.footer = None

    def flush(self, rev):
        # Emit any buffered header/hunk for rev.  Returns 1 if a hunk was
        # written, 0 otherwise.  A header identical to the last one
        # written is suppressed.
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        # flush the footer, if any, at end of output
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        # In buffered mode, capture the rendered changeset (with labels)
        # for later flush(); otherwise write it immediately.
        if self.buffered:
            self.ui.pushbuffer()
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()
        if self.ui.debugflag:
            hexfunc = hex
        else:
            hexfunc = short
        if rev is None:
            # working directory: display the first parent with a '+' suffix
            pctx = ctx.p1()
            revnode = (pctx.rev(), hexfunc(pctx.node()) + '+')
        else:
            revnode = (rev, hexfunc(changenode))

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % revnode, label='log.node')
            return

        date = util.datestr(ctx.date())

        # i18n: column positioning for "hg log"
        self.ui.write(_("changeset: %d:%s\n") % revnode,
                      label='log.changeset changeset.%s' % ctx.phasestr())

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            # i18n: column positioning for "hg log"
            self.ui.write(_("branch: %s\n") % branch,
                          label='log.branch')

        for name, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if name == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            # i18n: column positioning for "hg log"
            self.ui.write(_("phase: %s\n") % ctx.phasestr(),
                          label='log.phase')
        for pctx in self._meaningful_parentrevs(ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            # i18n: column positioning for "hg log"
            self.ui.write(_("parent: %d:%s\n")
                          % (pctx.rev(), hexfunc(pctx.node())),
                          label=label)

        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            # i18n: column positioning for "hg log"
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifest.rev(mnode), hex(mnode)),
                          label='ui.debug log.manifest')
        # i18n: column positioning for "hg log"
        self.ui.write(_("user: %s\n") % ctx.user(),
                      label='log.user')
        # i18n: column positioning for "hg log"
        self.ui.write(_("date: %s\n") % date,
                      label='log.date')

        if self.ui.debugflag:
            # modified/added/removed, in that order
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip([# i18n: column positioning for "hg log"
                                   _("files:"),
                                   # i18n: column positioning for "hg log"
                                   _("files+:"),
                                   # i18n: column positioning for "hg log"
                                   _("files-:")], files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            # i18n: column positioning for "hg log"
            self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            # i18n: column positioning for "hg log"
            self.ui.write(_("copies: %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                # i18n: column positioning for "hg log"
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # i18n: column positioning for "hg log"
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(changenode, matchfn)

    def showpatch(self, node, matchfn):
        # Show the patch and/or diffstat for node against its first
        # parent, according to the --patch/--stat log options.
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            prev = self.repo.changelog.parents(node)[0]
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                if stat:
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
            self.ui.write("\n")

    def _meaningful_parentrevs(self, ctx):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = ctx.parents()
        if len(parents) > 1:
            return parents
        if self.ui.debugflag:
            return [parents[0], self.repo['null']]
        if parents[0].rev() >= scmutil.intrev(self.repo, ctx.rev()) - 1:
            return []
        return parents
1270 1276
class jsonchangeset(changeset_printer):
    '''format changeset information.

    Renders each changeset as one object of a JSON array, built by hand
    with ui.write calls; close() terminates the array.
    '''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        # tracks whether the opening '[' has been written yet
        self._first = True

    def close(self):
        # close the JSON array; emit '[]' if no changeset was shown
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        if rev is None:
            # working directory has no rev/node; represent as JSON null
            jrev = jnode = 'null'
        else:
            jrev = str(rev)
            jnode = '"%s"' % hex(ctx.node())
        j = encoding.jsonescape

        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            self.ui.write(",\n {")

        if self.ui.quiet:
            self.ui.write('\n "rev": %s' % jrev)
            self.ui.write(',\n "node": %s' % jnode)
            self.ui.write('\n }')
            return

        self.ui.write('\n "rev": %s' % jrev)
        self.ui.write(',\n "node": %s' % jnode)
        self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
        self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
        self.ui.write(',\n "user": "%s"' % j(ctx.user()))
        self.ui.write(',\n "date": [%d, %d]' % ctx.date())
        self.ui.write(',\n "desc": "%s"' % j(ctx.description()))

        self.ui.write(',\n "bookmarks": [%s]' %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write(',\n "tags": [%s]' %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write(',\n "parents": [%s]' %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            self.ui.write(',\n "manifest": %s' % jmanifestnode)

            self.ui.write(',\n "extra": {%s}' %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            # status against the first parent: modified/added/removed
            files = ctx.p1().status(ctx)
            self.ui.write(',\n "modified": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write(',\n "added": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write(',\n "removed": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            self.ui.write(',\n "files": [%s]' %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

        if copies:
            self.ui.write(',\n "copies": {%s}' %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                # capture the diffstat output and embed it as a JSON string
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))

        self.ui.write("\n }")
1368 1374
class changeset_templater(changeset_printer):
    '''format changeset information.

    Like changeset_printer, but renders each changeset through a
    user-supplied template or style map file.
    '''

    def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        # full node in debug mode, short (12-char) form otherwise
        formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
        defaulttempl = {
            'parent': '{rev}:{node|formatnode} ',
            'manifest': '{rev}:{node|formatnode}',
            'file_copy': '{name} ({source})',
            'extra': '{key}={value|stringescape}'
            }
        # filecopy is preserved for compatibility reasons
        defaulttempl['filecopy'] = defaulttempl['file_copy']
        self.t = templater.templater(mapfile, {'formatnode': formatnode},
                                     cache=defaulttempl)
        if tmpl:
            # an explicit template overrides the map file's 'changeset'
            self.t.cache['changeset'] = tmpl

        self.cache = {}

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''

        showlist = templatekw.showlist

        # showparents() behaviour depends on ui trace level which
        # causes unexpected behaviours at templating level and makes
        # it harder to extract it in a standalone function. Its
        # behaviour cannot be changed so leave it here for now.
        def showparents(**args):
            ctx = args['ctx']
            parents = [[('rev', p.rev()),
                        ('node', p.hex()),
                        ('phase', p.phasestr())]
                       for p in self._meaningful_parentrevs(ctx)]
            return showlist('parent', parents, **args)

        props = props.copy()
        props.update(templatekw.keywords)
        props['parents'] = showparents
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache

        # find correct templates for current mode

        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        # later (more specific) modes override earlier entries
        types = {'header': '', 'footer':'', 'changeset': 'changeset'}
        for mode, postfix in tmplmodes:
            for type in types:
                cur = postfix and ('%s_%s' % (type, postfix)) or type
                if mode and cur in self.t:
                    types[type] = cur

        try:

            # write header
            if types['header']:
                h = templater.stringify(self.t(types['header'], **props))
                if self.buffered:
                    self.header[ctx.rev()] = h
                else:
                    if self.lastheader != h:
                        self.lastheader = h
                        self.ui.write(h)

            # write changeset metadata, then patch if requested
            key = types['changeset']
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(ctx.node(), matchfn)

            if types['footer']:
                if not self.footer:
                    self.footer = templater.stringify(self.t(types['footer'],
                                                             **props))

        except KeyError, inst:
            msg = _("%s: no key named '%s'")
            raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
        except SyntaxError, inst:
            raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1459 1465
def gettemplate(ui, tmpl, style):
    """
    Find the template matching the given template spec or style.

    Returns a (template, mapfile) pair; at most one element is set.
    tmpl may be a literal template, a stock style name, a [templates]
    config key, the special value 'list', or a path to a map file or
    template file.
    """

    # ui settings
    if not tmpl and not style: # template are stronger than style
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            try:
                tmpl = templater.unquotestring(tmpl)
            except SyntaxError:
                pass
            return tmpl, None
        else:
            style = util.expandpath(ui.config('ui', 'style', ''))

    if not tmpl and style:
        mapfile = style
        if not os.path.split(mapfile)[0]:
            # bare style name: resolve against the shipped template path
            mapname = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if mapname:
                mapfile = mapname
        return None, mapfile

    if not tmpl:
        return None, None

    # looks like a literal template?
    if '{' in tmpl:
        return tmpl, None

    # perhaps a stock style?
    if not os.path.split(tmpl)[0]:
        mapname = (templater.templatepath('map-cmdline.' + tmpl)
                   or templater.templatepath(tmpl))
        if mapname and os.path.isfile(mapname):
            return None, mapname

    # perhaps it's a reference to [templates]
    t = ui.config('templates', tmpl)
    if t:
        try:
            tmpl = templater.unquotestring(t)
        except SyntaxError:
            tmpl = t
        return tmpl, None

    if tmpl == 'list':
        ui.write(_("available styles: %s\n") % templater.stylelist())
        raise util.Abort(_("specify a template"))

    # perhaps it's a path to a map or a template
    if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
        # is it a mapfile for a style?
        if os.path.basename(tmpl).startswith("map-"):
            return None, os.path.realpath(tmpl)
        # plain template file: read it, closing the handle explicitly
        # instead of leaking it until garbage collection
        fp = open(tmpl)
        try:
            tmpl = fp.read()
        finally:
            fp.close()
        return tmpl, None

    # constant string?
    return tmpl, None
1523 1529
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.

    Returns a printer object (changeset_printer, jsonchangeset or
    changeset_templater).
    """
    # options
    matchfn = None
    if opts.get('patch') or opts.get('stat'):
        matchfn = scmutil.matchall(repo)

    # 'json' is handled by a dedicated printer, not the templater
    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, matchfn, opts, buffered)

    tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))

    if not tmpl and not mapfile:
        return changeset_printer(ui, repo, matchfn, opts, buffered)

    try:
        t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
                                buffered)
    except SyntaxError, inst:
        raise util.Abort(inst.args[0])
    return t
1554 1560
def showmarker(ui, marker):
    """Print one obsolescence marker in a human-readable form.

    Intended for use by debug commands."""
    out = ui.write
    # precursor node first, then all successor nodes
    out(hex(marker.precnode()))
    for succ in marker.succnodes():
        out(' ')
        out(hex(succ))
    out(' %X ' % marker.flags())
    parentnodes = marker.parentnodes()
    if parentnodes is not None:
        out('{%s} ' % ', '.join(hex(p) for p in parentnodes))
    out('(%s) ' % util.datestr(marker.date()))
    # metadata, sorted by key, minus the date which was shown above
    meta = sorted(marker.metadata().items())
    out('{%s}' % (', '.join('%r: %r' % item
                            for item in meta
                            if item[0] != 'date')))
    out('\n')
1572 1578
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec.

    Walks the repository history, records every revision whose date
    matches, and returns (as a string) the first matching revision
    encountered during the walk.  Raises util.Abort if none matches.
    """

    df = util.matchdate(date)
    m = scmutil.matchall(repo)
    results = {}

    def prep(ctx, fns):
        # called by walkchangerevs for each candidate; remember matches
        d = ctx.date()
        if df(d[0]):
            results[ctx.rev()] = d

    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in results:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(results[rev])))
            return str(rev)

    raise util.Abort(_("revision matching date not found"))
1593 1599
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes starting at windowsize and doubling each time.

    Once the yielded value reaches sizelimit it stops growing and is
    repeated forever.
    """
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size = size * 2
1599 1605
class FileWalkError(Exception):
    """Raised when the file history cannot be walked via filelogs alone."""
    pass
1602 1608
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Side effect: fncache is filled with, for each wanted rev, the list
    of matched files it touches.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yield (filename, filenode-or-None) for matched files, then any
        # copy sources discovered while walking
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise util.Abort(_('cannot follow file not in parent '
                                       'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise util.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = set([filelog.linkrev(last)])

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
            # XXX insert 1327 fix here
            if flparentlinkrevs:
                ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
1699 1705
class _followfilter(object):
    """Stateful filter deciding whether a revision belongs to the
    follow-graph rooted at the first revision passed to match().

    Revisions must be fed to match() in a consistent direction: after
    the first call, later calls walk either forward (descendants) or
    backward (ancestors) relative to that starting revision.
    """

    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        # startrev is set lazily by the first match() call
        self.startrev = nullrev
        self.roots = set()
        self.onlyfirst = onlyfirst

    def match(self, rev):
        # Return True if rev is connected to startrev through parent
        # links, updating internal state as the walk proceeds.
        def realparents(rev):
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
1737 1743
1738 1744 def walkchangerevs(repo, match, opts, prepare):
1739 1745 '''Iterate over files and the revs in which they changed.
1740 1746
1741 1747 Callers most commonly need to iterate backwards over the history
1742 1748 in which they are interested. Doing so has awful (quadratic-looking)
1743 1749 performance, so we use iterators in a "windowed" way.
1744 1750
1745 1751 We walk a window of revisions in the desired order. Within the
1746 1752 window, we first walk forwards to gather data, then in the desired
1747 1753 order (usually backwards) to display it.
1748 1754
1749 1755 This function returns an iterator yielding contexts. Before
1750 1756 yielding each context, the iterator will first call the prepare
1751 1757 function on each context in the window in forward order.'''
1752 1758
1753 1759 follow = opts.get('follow') or opts.get('follow_first')
1754 1760 revs = _logrevs(repo, opts)
1755 1761 if not revs:
1756 1762 return []
1757 1763 wanted = set()
1758 1764 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1759 1765 opts.get('removed'))
1760 1766 fncache = {}
1761 1767 change = repo.changectx
1762 1768
1763 1769 # First step is to fill wanted, the set of revisions that we want to yield.
1764 1770 # When it does not induce extra cost, we also fill fncache for revisions in
1765 1771 # wanted: a cache of filenames that were changed (ctx.files()) and that
1766 1772 # match the file filtering conditions.
1767 1773
1768 1774 if match.always():
1769 1775 # No files, no patterns. Display all revs.
1770 1776 wanted = revs
1771 1777 elif not slowpath:
1772 1778 # We only have to read through the filelog to find wanted revisions
1773 1779
1774 1780 try:
1775 1781 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1776 1782 except FileWalkError:
1777 1783 slowpath = True
1778 1784
1779 1785 # We decided to fall back to the slowpath because at least one
1780 1786 # of the paths was not a file. Check to see if at least one of them
1781 1787 # existed in history, otherwise simply return
1782 1788 for path in match.files():
1783 1789 if path == '.' or path in repo.store:
1784 1790 break
1785 1791 else:
1786 1792 return []
1787 1793
1788 1794 if slowpath:
1789 1795 # We have to read the changelog to match filenames against
1790 1796 # changed files
1791 1797
1792 1798 if follow:
1793 1799 raise util.Abort(_('can only follow copies/renames for explicit '
1794 1800 'filenames'))
1795 1801
1796 1802 # The slow path checks files modified in every changeset.
1797 1803 # This is really slow on large repos, so compute the set lazily.
1798 1804 class lazywantedset(object):
1799 1805 def __init__(self):
1800 1806 self.set = set()
1801 1807 self.revs = set(revs)
1802 1808
1803 1809 # No need to worry about locality here because it will be accessed
1804 1810 # in the same order as the increasing window below.
1805 1811 def __contains__(self, value):
1806 1812 if value in self.set:
1807 1813 return True
1808 1814 elif not value in self.revs:
1809 1815 return False
1810 1816 else:
1811 1817 self.revs.discard(value)
1812 1818 ctx = change(value)
1813 1819 matches = filter(match, ctx.files())
1814 1820 if matches:
1815 1821 fncache[value] = matches
1816 1822 self.set.add(value)
1817 1823 return True
1818 1824 return False
1819 1825
1820 1826 def discard(self, value):
1821 1827 self.revs.discard(value)
1822 1828 self.set.discard(value)
1823 1829
1824 1830 wanted = lazywantedset()
1825 1831
1826 1832 # it might be worthwhile to do this in the iterator if the rev range
1827 1833 # is descending and the prune args are all within that range
1828 1834 for rev in opts.get('prune', ()):
1829 1835 rev = repo[rev].rev()
1830 1836 ff = _followfilter(repo)
1831 1837 stop = min(revs[0], revs[-1])
1832 1838 for x in xrange(rev, stop - 1, -1):
1833 1839 if ff.match(x):
1834 1840 wanted = wanted - [x]
1835 1841
1836 1842 # Now that wanted is correctly initialized, we can iterate over the
1837 1843 # revision range, yielding only revisions in wanted.
1838 1844 def iterate():
1839 1845 if follow and match.always():
1840 1846 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1841 1847 def want(rev):
1842 1848 return ff.match(rev) and rev in wanted
1843 1849 else:
1844 1850 def want(rev):
1845 1851 return rev in wanted
1846 1852
1847 1853 it = iter(revs)
1848 1854 stopiteration = False
1849 1855 for windowsize in increasingwindows():
1850 1856 nrevs = []
1851 1857 for i in xrange(windowsize):
1852 1858 rev = next(it, None)
1853 1859 if rev is None:
1854 1860 stopiteration = True
1855 1861 break
1856 1862 elif want(rev):
1857 1863 nrevs.append(rev)
1858 1864 for rev in sorted(nrevs):
1859 1865 fns = fncache.get(rev)
1860 1866 ctx = change(rev)
1861 1867 if not fns:
1862 1868 def fns_generator():
1863 1869 for f in ctx.files():
1864 1870 if match(f):
1865 1871 yield f
1866 1872 fns = fns_generator()
1867 1873 prepare(ctx, fns)
1868 1874 for rev in nrevs:
1869 1875 yield change(rev)
1870 1876
1871 1877 if stopiteration:
1872 1878 break
1873 1879
1874 1880 return iterate()
1875 1881
def _makefollowlogfilematcher(repo, files, followfirst):
    """Return a rev -> matcher callable for --patch/--stat with --follow.

    With --patch --follow FILE we must know which file of each shown
    revision should be diffed: the ancestors of FILE in that revision.
    We rebuild the graph walk the --follow revset already performed,
    relating linkrevs to file names (not strictly "correct", but good
    enough), and cache the result in "linkrevcache".
    """
    linkrevcache = {}
    initialized = [False]
    wdirparent = repo['.']

    def _fill():
        for name in files:
            fctx = wdirparent[name]
            for group in ((fctx,), fctx.ancestors(followfirst=followfirst)):
                for actx in group:
                    paths = linkrevcache.setdefault(actx.linkrev(), set())
                    paths.add(actx.path())

    def filematcher(rev):
        # Populate the cache lazily, on the first lookup only.
        if not initialized[0]:
            initialized[0] = True
            _fill()
        return scmutil.matchfiles(repo, linkrevcache.get(rev, []))

    return filematcher
1902 1908
1903 1909 def _makenofollowlogfilematcher(repo, pats, opts):
1904 1910 '''hook for extensions to override the filematcher for non-follow cases'''
1905 1911 return None
1906 1912
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # Map a log option name to (revset template, joiner). The joiner
    # combines multiple values of a list-valued option; None means the
    # option never carries a list.
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    # Work on a copy: synthetic '_'-prefixed options are injected below.
    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behaviour depends on revs...
    # (callers guarantee revs is non-empty, so it.next() cannot raise)
    it = iter(revs)
    startrev = it.next()
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        # Fast path requires every pattern to name an existing filelog.
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise util.Abort(_('cannot follow file not in parent '
                                       'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise util.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

        # We decided to fall back to the slowpath because at least one
        # of the paths was not a file. Check to see if at least one of them
        # existed in history - in that case, we'll continue down the
        # slowpath; otherwise, we can turn off the slowpath
        if slowpath:
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                slowpath = False

    # Synthetic option names selected by (followdescendants, followfirst).
    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
        if filematcher is None:
            filematcher = lambda rev: match

    # Translate every recognized (and truthy) option into a revset
    # fragment; sorted() keeps the generated expression deterministic.
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2058 2064
def _logrevs(repo, opts):
    """Resolve the revisions log should visit for the given options.

    The default --rev value depends on --follow, while --follow
    behaviour depends on the revisions resolved from --rev...
    """
    following = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        return scmutil.revrange(repo, opts['rev'])
    if following:
        if repo.dirstate.p1() == nullid:
            # Following from an unborn working directory parent: nothing.
            return revset.baseset()
        return repo.revs('reverse(:.)')
    allrevs = revset.spanset(repo)
    allrevs.reverse()
    return allrevs
2073 2079
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        revs.sort(reverse=True)
    if expr:
        # Revset matchers often operate faster on revisions in changelog
        # order, because most filters deal with the changelog.
        revs.reverse()
        matcher = revset.match(repo.ui, expr)
        # Revset matches can reorder revisions. "A or B" typically returns
        # the revision matching A then the revision matching B. Sort
        # again to fix that.
        revs = matcher(repo, revs)
        revs.sort(reverse=True)
    # Apply --limit by taking at most 'limit' revisions, preserving order.
    if limit is not None:
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = revset.baseset(limitedrevs)

    return revs, expr, filematcher
2110 2116
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        # Revset matchers often operate faster on revisions in changelog
        # order, because most filters deal with the changelog.
        # (User-specified --rev order is preserved, hence the guards.)
        if not opts.get('rev'):
            revs.reverse()
        matcher = revset.match(repo.ui, expr)
        # Revset matches can reorder revisions. "A or B" typically returns
        # the revision matching A then the revision matching B. Sort
        # again to fix that.
        revs = matcher(repo, revs)
        if not opts.get('rev'):
            revs.sort(reverse=True)
    # Apply --limit by taking at most 'limit' revisions, preserving order.
    if limit is not None:
        limitedrevs = []
        for idx, r in enumerate(revs):
            if limit <= idx:
                break
            limitedrevs.append(r)
        revs = revset.baseset(limitedrevs)

    return revs, expr, filematcher
2145 2151
def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
                 filematcher=None):
    """Render each (rev, type, ctx, parents) entry of 'dag' as ASCII graph
    output through 'displayer'.

    'showparents' is a collection of node ids drawn with '@'; obsolete
    changesets get 'x' and branch-closing ones '_'. 'getrenamed', when
    given, supplies copy information; 'filematcher', when given, limits
    the files detailed per revision. NOTE: 'type' and 'copies' shadow
    the builtin/module names within this loop.
    """
    seen, state = [], graphmod.asciistate()
    for rev, type, ctx, parents in dag:
        # Pick the graph glyph for this changeset.
        char = 'o'
        if ctx.node() in showparents:
            char = '@'
        elif ctx.obsolete():
            char = 'x'
        elif ctx.closesbranch():
            char = '_'
        copies = None
        # ctx.rev() is falsy for rev 0 / the working directory, in which
        # case rename lookup is skipped.
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        # The displayer buffered its output; pull it back as lines,
        # dropping a trailing empty line from the final '\n'.
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(rev)
        edges = edgefn(type, char, lines, seen, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()
2176 2182
def graphlog(ui, repo, *pats, **opts):
    """Show revision history alongside an ASCII revision graph.

    Parameters are identical to those of the log command.
    """
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    renamefn = None
    if opts.get('copies'):
        # Limit rename detection to the requested revision range, if any.
        stoprev = None
        if opts.get('rev'):
            stoprev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        renamefn = templatekw.getrenamedfn(repo, endrev=stoprev)

    displayer = show_changeset(ui, repo, opts, buffered=True)
    parentnodes = [pctx.node() for pctx in repo[None].parents()]
    displaygraph(ui, revdag, displayer, parentnodes, graphmod.asciiedges,
                 renamefn, filematcher)
2192 2198
def checkunsupportedgraphflags(pats, opts):
    """Abort when an option incompatible with -G/--graph is enabled."""
    unsupported = ["newest_first"]
    for name in unsupported:
        if opts.get(name):
            raise util.Abort(_("-G/--graph option is incompatible with --%s")
                             % name.replace("_", "-"))
2198 2204
def graphrevs(repo, nodes, opts):
    """Build a graph DAG walker over 'nodes', newest first.

    'nodes' is reversed in place; --limit truncates the sequence.
    """
    cutoff = loglimit(opts)
    nodes.reverse()
    if cutoff is not None:
        nodes = nodes[:cutoff]
    return graphmod.nodes(repo, nodes)
2205 2211
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule the matched files for addition; recurse into subrepos.

    'prefix' is joined to names for reporting. When 'explicitonly' is
    true, only files named exactly (not via patterns) are considered.
    Returns the list of files that could not be added.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # Intercept match.bad to collect failing paths while still reporting
    # them through the original callback.
    oldbad = match.bad
    match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
    for f in wctx.walk(match):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                # Check for filename case collisions on case-insensitive
                # filesystems.
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, match)
            # With --subrepos, add explicitly inside subrepos too;
            # otherwise only pass through exactly-named files.
            if opts.get('subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
2242 2248
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking the matched files without deleting them.

    'prefix' is joined to names for reporting. When 'explicitonly' is
    true, only exactly-named files are forgotten. Recurses into
    subrepositories. Returns (bad, forgot): files that could not be
    forgotten and files that were.

    Fix over previous revision: the local holding the files to forget
    used to shadow this function's own name ('forget'); it is renamed
    to 'toforget'. Behavior is unchanged.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # Intercept match.bad to collect failing paths while still reporting
    # them through the original callback.
    oldbad = match.bad
    match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
    wctx = repo[None]
    forgot = []
    s = repo.status(match=match, clean=True)
    # modified + added + deleted + clean, i.e. everything tracked
    toforget = sorted(s[0] + s[1] + s[3] + s[6])
    if explicitonly:
        toforget = [f for f in toforget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in toforget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(toforget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in toforget if f not in rejected)
    return bad, forgot
2290 2296
def files(ui, ctx, m, fm, fmt, subrepos):
    """List files of 'ctx' matching 'm' through formatter 'fm'.

    'fmt' is the format string used for each path. Returns 0 if at
    least one file was listed, 1 otherwise. NOTE: this function's name
    shadows the builtin-free module convention but matches the command.
    """
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        # In the working directory (rev is None), skip files marked
        # for removal in the dirstate.
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        def matchessubrepo(subpath):
            # A subrepo matches if everything matches, it is named
            # exactly, or a pattern file lies inside it.
            return (m.always() or m.exact(subpath)
                    or any(f.startswith(subpath + '/') for f in m.files()))

        if subrepos or matchessubrepo(subpath):
            sub = ctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret
2323 2329
def remove(ui, repo, m, prefix, after, force, subrepos):
    """Remove the matched files from the repository (and, unless --after,
    from disk).

    'after' records only already-deleted files; 'force' removes even
    modified/added files; 'subrepos' recurses into all subrepositories.
    Returns 0 on success, 1 if any file could not be removed.

    Fix over previous revision: the local holding the files to remove
    used to shadow the builtin 'list'; it is renamed to 'toremove'.
    Behavior is unchanged.
    """
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    for subpath in sorted(wctx.substate):
        def matchessubrepo(matcher, subpath):
            # A subrepo matches if named exactly or a pattern file lies
            # inside it.
            if matcher.exact(subpath):
                return True
            for f in matcher.files():
                if f.startswith(subpath):
                    return True
            return False

        if subrepos or matchessubrepo(m, subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                if sub.removefiles(submatch, prefix, after, force, subrepos):
                    ret = 1
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % join(subpath))

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    for f in m.files():
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath):
                    return True
            return False

        isdir = f in deleteddirs or wctx.hasdir(f)
        if f in repo.dirstate or isdir or f == '.' or insubrepo():
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                ui.warn(_('not removing %s: no tracked files\n')
                        % m.rel(f))
            else:
                ui.warn(_('not removing %s: file is untracked\n')
                        % m.rel(f))
            # missing files will generate a warning elsewhere
            ret = 1

    # Decide which files actually get removed, warning about the rest.
    if force:
        toremove = modified + deleted + clean + added
    elif after:
        toremove = deleted
        for f in modified + added + clean:
            ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
            ret = 1
    else:
        toremove = deleted + clean
        for f in modified:
            ui.warn(_('not removing %s: file is modified (use -f'
                      ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            ui.warn(_('not removing %s: file has been marked for add'
                      ' (use forget to undo)\n') % m.rel(f))
            ret = 1

    for f in sorted(toremove):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    wlock = repo.wlock()
    try:
        if not after:
            for f in toremove:
                if f in added:
                    continue # we never unlink added files on remove
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        repo[None].forget(toremove)
    finally:
        wlock.release()

    return ret
2408 2414
def cat(ui, repo, ctx, matcher, prefix, **opts):
    """Write the contents of matched files in 'ctx' to the output given
    by opts ('output'), optionally decoding them ('decode').

    Recurses into subrepositories. Returns 0 if at least one file was
    written, 1 otherwise. NOTE(review): the locals 'file' and 'abs'
    below shadow Python builtins.
    """
    err = 1

    def write(path):
        fp = makefileobj(repo, opts.get('output'), ctx.node(),
                         pathname=os.path.join(prefix, path))
        data = ctx[path].data()
        if opts.get('decode'):
            data = repo.wwritedata(path, data)
        fp.write(data)
        fp.close()

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mf = repo.manifest
        mfnode = ctx.manifestnode()
        if mfnode and mf.find(mfnode, file)[0]:
            write(file)
            return 0

    # Don't warn about "missing" files that are really in subrepos
    bad = matcher.bad

    def badfn(path, msg):
        for subpath in ctx.substate:
            if path.startswith(subpath):
                return
        bad(path, msg)

    # Temporarily swap in the subrepo-aware bad callback; restored below.
    matcher.bad = badfn

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    matcher.bad = bad

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, matcher)

            if not sub.cat(submatch, os.path.join(prefix, sub._path),
                           **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2461 2467
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get('date')
    if rawdate:
        opts['date'] = util.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    addremove = opts.get('addremove')
    if addremove and scmutil.addremove(repo, matcher, "", opts) != 0:
        raise util.Abort(
            _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)
2478 2484
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    """Replace changeset 'old' with a new one folding in working
    directory changes, returning the new node id.

    Works in two steps under wlock/lock/transaction: (1) commit a
    temporary changeset of the working directory on top of 'old',
    (2) build a memctx combining 'old' and that temporary commit on top
    of old's parent and commit it. The originals are then either marked
    obsolete or stripped, depending on the evolution config.
    """
    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()

    wlock = dsguard = lock = newid = None
    try:
        wlock = repo.wlock()
        dsguard = dirstateguard(repo, 'amend')
        lock = repo.lock()
        tr = repo.transaction('amend')
        try:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._activebookmark
            try:
                # Deactivate the bookmark so the temporary commit does
                # not move it; restored in the finally below.
                repo._activebookmark = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._activebookmark = activebookmark
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            # |          from working dir to go into amending commit
            # |          (or a workingctx if there were no changes)
            # |
            # old      o - changeset to amend
            # |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                # NOTE(review): 'old.p2' is a bound method and therefore
                # always truthy; was 'old.p2()' (has a second parent)
                # intended here? verify.
                if old.p2:
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())

                def samefile(f):
                    # True when f is identical (content and flags) in
                    # ctx and base, or absent from both.
                    if f in ctx.manifest():
                        a = ctx.filectx(f)
                        if f in base.manifest():
                            b = base.filectx(f)
                            return (not a.cmp(b)
                                    and a.flags() == b.flags())
                        else:
                            return False
                    else:
                        return f not in base.manifest()
                files = [f for f in files if not samefile(f)]

                def filectxfn(repo, ctx_, path):
                    # Serve file contents from the intermediate commit;
                    # None means the file was removed.
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

            user = opts.get('user') or old.user()
            date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            # Keep a copy without amend_source to detect no-op amends below.
            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This not what we expect from amend.
                return old.node()

            # Commit the new changeset in the same phase as the old one
            # (or secret with --secret), restoring the config afterwards.
            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        marks[bm] = newid
                    marks.write()
            #commit the whole amend process
            createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
            if createmarkers and newid != old.node():
                # mark the new changeset as successor of the rewritten one
                new = repo[newid]
                obs = [(old, (new,))]
                if node:
                    obs.append((ctx, ()))

                obsolete.createmarkers(repo, obs)
            tr.close()
        finally:
            tr.release()
            dsguard.close()
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        lockmod.release(lock, dsguard, wlock)
    return newid
2673 2679
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's existing description, or run the interactive commit
    editor (via commitforceeditor) when the changeset has none."""
    existing = ctx.description()
    if existing:
        return existing
    return commitforceeditor(repo, ctx, subs, editform=editform)
2678 2684
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform=''):
    """Unconditionally invoke the user's editor to collect a commit message.

    subs lists subrepo names mentioned in the editor template; finishdesc,
    if given, post-processes the edited text; extramsg is the hint line shown
    in the template (defaults to the "leave empty to abort" hint); editform
    selects the most specific matching [committemplate] configuration entry.

    Raises util.Abort if the resulting message is empty after stripping.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # look up the most specific [committemplate] entry for editform,
    # falling back from 'changeset.<form>.<sub>...' down to 'changeset'
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        # no template configured at all: use the hard-coded default text
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = os.getcwd()
    os.chdir(repo.root)
    text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
    # drop the template's 'HG:' helper lines from the edited result
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise util.Abort(_("empty commit message"))

    return text
2708 2714
def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    """Render the commit editor text for ctx from template string tmpl.

    extramsg is passed to the template as the 'extramsg' keyword; the
    rendered text is captured via the ui buffer and returned.
    """
    # NOTE(review): 'subs' is unused here; kept for interface parity with
    # buildcommittext
    ui = repo.ui
    tmpl, mapfile = gettemplate(ui, tmpl, None)

    try:
        t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
    except SyntaxError, inst:
        raise util.Abort(inst.args[0])

    # expose the other [committemplate] items so templates can reference
    # them as shortcuts
    for k, v in repo.ui.configitems('committemplate'):
        if k != 'changeset':
            t.t.cache[k] = v

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2728 2734
def buildcommittext(repo, ctx, subs, extramsg):
    """Assemble the default (non-templated) commit editor text for ctx.

    The text starts with any existing description, followed by 'HG:'
    helper lines (stripped again after editing) describing the commit
    metadata and the files touched.
    """
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    lines = []
    if ctx.description():
        lines.append(ctx.description())
    lines.append("")
    lines.append("") # Empty line between message and comments.
    lines.append(_("HG: Enter commit message."
                   " Lines beginning with 'HG:' are removed."))
    lines.append("HG: %s" % extramsg)
    lines.append("HG: --")
    lines.append(_("HG: user: %s") % ctx.user())
    if ctx.p2():
        lines.append(_("HG: branch merge"))
    if ctx.branch():
        lines.append(_("HG: branch '%s'") % ctx.branch())
    if bookmarks.isactivewdirparent(repo):
        lines.append(_("HG: bookmark '%s'") % repo._activebookmark)
    lines.extend(_("HG: subrepo %s") % s for s in subs)
    lines.extend(_("HG: added %s") % f for f in added)
    lines.extend(_("HG: changed %s") % f for f in modified)
    lines.extend(_("HG: removed %s") % f for f in removed)
    if not (added or modified or removed):
        lines.append(_("HG: no files changed"))
    lines.append("")

    return "\n".join(lines)
2756 2762
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Print post-commit status messages for the new changeset ``node``.

    bheads is the list of branch heads as they were before the commit;
    opts may carry the 'amend' and 'close_branch' command flags.
    Emits 'created new head' / 'reopening closed branch head' notes and,
    in verbose/debug mode, the committed changeset id.
    """
    # fix: avoid the mutable-default-argument anti-pattern (the dict was
    # never mutated, so None-plus-fallback is behaviorally identical)
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2802 2808
def revert(ui, repo, ctx, parents, *pats, **opts):
    """Restore files matched by pats/opts to their state in ctx.

    parents is the (p1, p2) pair of the working directory. The function
    classifies every affected file into disjoint sets (modified, added,
    removed, ... both against the target ctx and against the dirstate
    parent), maps each set to an action through a dispatch table, prints
    per-file messages, optionally backs files up as '<file>.orig', and
    finally delegates the actual work to _performrevert. Subrepos matched
    by the patterns are reverted recursively. Honors the 'dry_run',
    'no_backup' and 'interactive' options.
    """
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        parent = p2
    if node == parent:
        pmf = mf
    else:
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    wlock = repo.wlock()
    try:
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            # silence walk errors: a file may exist in only one of the
            # two walked sources (dirstate vs target manifest)
            m.bad = lambda x, y: False
            for abs in repo.walk(m):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            m.bad = badfn
            for abs in ctx.walk(m):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find status of all file in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # split between files known in target manifest and the others
        smf = set(mf)

        # determine the exact nature of the deleted changesets
        deladded = _deleted - smf
        deleted = _deleted - deladded

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinct between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may needs backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            if pmf is None:
                # only need parent manifest in the merge case,
                # so do not read by default
                pmf = repo[parent].manifest()
            mergeadd = dsmodified - set(pmf)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # distinguish between file to forget and the other
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present at
        # the same path. If a such file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of file>, message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                  }

        # "constant" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set do avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backup = 2  # unconditionally do backup
        check = 1   # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backup = check = discard

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets that results that will change file on disk
            # Modified compared to target, no local change
            (modified,      actions['revert'],   discard),
            # Modified compared to target, but local file is deleted
            (deleted,       actions['revert'],   discard),
            # Modified compared to target, local change
            (dsmodified,    actions['revert'],   backup),
            # Added since target
            (added,         actions['remove'],   discard),
            # Added in working directory
            (dsadded,       actions['forget'],   discard),
            # Added since target, have local modification
            (modadded,      backupanddel,        backup),
            # Added since target but file is missing in working directory
            (deladded,      actions['drop'],     discard),
            # Removed since target, before working copy parent
            (removed,       actions['add'],      discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk,      actions['add'],      check),
            # Removed since targe, marked as such in working copy parent
            (dsremoved,     actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk,    actions['undelete'], check),
            ## the following sets does not result in any file changes
            # File with no modification
            (clean,         actions['noop'],     discard),
            # Existing file, not tracked anywhere
            (unknown,       actions['unknown'],  discard),
            )

        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touch on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup and (backup <= dobackup
                                     or wctx[abs].cmp(ctx[abs])):
                        bakname = "%s.orig" % rel
                        ui.note(_('saving current version of %s as %s\n') %
                                (rel, bakname))
                        if not opts.get('dry_run'):
                            if interactive:
                                # interactive mode may still need the
                                # original in place for the patch step
                                util.copyfile(target, bakname)
                            else:
                                util.rename(target, bakname)
                    if ui.verbose or not exact:
                        if not isinstance(msg, basestring):
                            msg = msg(abs)
                        ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
            _performrevert(repo, parents, ctx, actions, interactive)

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
                except KeyError:
                    raise util.Abort("subrepository '%s' does not exist in %s!"
                                     % (sub, short(ctx.node())))
    finally:
        wlock.release()
3084 3090
3085 3091 def _revertprefetch(repo, ctx, *files):
3086 3092 """Let extension changing the storage layer prefetch content"""
3087 3093 pass
3088 3094
def _performrevert(repo, parents, ctx, actions, interactive=False):
    """function that actually perform all the actions computed for revert

    This is an independent function to let extension to plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.

    actions maps action names ('forget', 'remove', 'drop', 'revert', 'add',
    'undelete', ...) to (filelist, message) pairs as built by revert(). With
    interactive=True the user is prompted hunk-by-hunk for the 'revert' set.
    """
    parent, p2 = parents
    node = ctx.node()
    def checkout(f):
        # write ctx's version of f into the working directory; returns the
        # size written (used below for the dirstate size heuristic)
        fc = ctx[f]
        return repo.wwrite(f, fc.data(), fc.flags())

    audit_path = pathutil.pathauditor(repo.root)
    for f in actions['forget'][0]:
        repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        try:
            util.unlinkpath(repo.wjoin(f))
        except OSError:
            # file may already be gone; updating the dirstate is enough
            pass
        repo.dirstate.remove(f)
    for f in actions['drop'][0]:
        # 'drop': forget in dirstate but leave the working copy file alone
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, {})
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)
        try:
            chunks = recordfilter(repo.ui, originalchunks)
        except patch.PatchError, err:
            raise util.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        # Apply changes
        fp = cStringIO.StringIO()
        for c in chunks:
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except patch.PatchError, err:
                raise util.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            wsize = checkout(f)
            if normal:
                normal(f)
            elif wsize == repo.dirstate._map[f][2]:
                # changes may be overlooked without normallookup,
                # if size isn't changed at reverting
                repo.dirstate.normallookup(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    # restore copy/rename metadata for everything we wrote out
    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3183 3189
def command(table):
    """Returns a function object to be used as a decorator for making commands.

    This function receives a command table (a dict) and returns a decorator
    factory that registers commands into it.

    The factory takes: the command name; an iterable of option tuples (see
    ``mercurial.fancyopts.fancyopts()`` for the tuple format); an optional
    one-line synopsis shown in help output; and three boolean flags. norepo
    marks a command that needs no local repository (default False),
    optionalrepo marks one where the repository is optional, and inferrepo
    asks dispatch to infer a repository location from the command line
    arguments (see ``findrepo()``).
    """
    def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
            inferrepo=False):
        def decorator(func):
            entry = (func, list(options))
            if synopsis:
                entry += (synopsis,)
            table[name] = entry

            if norepo or optionalrepo or inferrepo:
                # Avoid import cycle.
                import commands
                aliases = ' %s' % ' '.join(parsealiases(name))
                if norepo:
                    commands.norepo += aliases
                if optionalrepo:
                    commands.optionalrepo += aliases
                if inferrepo:
                    commands.inferrepo += aliases

            return func
        return decorator

    return cmd
3239 3245
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
#  (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
#  - (sourceurl, sourcebranch, sourcepeer, incoming)
#  - (desturl,   destbranch,   destpeer,   outgoing)
summaryremotehooks = util.hooks()

# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# (state file, clearable, allowcommit, error, hint)
# consumed by checkunfinished()/clearunfinished() below
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg update' to abort")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]
3268 3274
def checkunfinished(repo, commit=False):
    '''Look for an unfinished multistep operation, like graft, and abort
    if found. It's probably good to check this right before
    bailifchanged(). With commit=True, states whose entry allows
    committing are ignored.
    '''
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        skip = commit and allowcommit
        if not skip and repo.vfs.exists(statefile):
            raise util.Abort(msg, hint=hint)
3279 3285
def clearunfinished(repo):
    '''Check for unfinished operations (as above), and clear the ones
    that are clearable.
    '''
    # first pass: refuse to proceed if any non-clearable state exists
    for statefile, clearable, _allowcommit, msg, hint in unfinishedstates:
        if not clearable and repo.vfs.exists(statefile):
            raise util.Abort(msg, hint=hint)
    # second pass: remove every clearable state file that is present
    for statefile, clearable, _allowcommit, msg, hint in unfinishedstates:
        if clearable and repo.vfs.exists(statefile):
            util.unlink(repo.join(statefile))
3290 3296
class dirstateguard(object):
    '''Restore dirstate at unexpected failure.

    At the construction, this class does:

    - write current ``repo.dirstate`` out, and
    - save ``.hg/dirstate`` into the backup file

    This restores ``.hg/dirstate`` from backup file, if ``release()``
    is invoked before ``close()``.

    This just removes the backup file at ``close()`` before ``release()``.
    '''

    def __init__(self, repo, name):
        # flush in-memory dirstate changes so the on-disk file is current,
        # then snapshot it under a per-instance unique backup name
        repo.dirstate.write()
        self._repo = repo
        self._filename = 'dirstate.backup.%s.%d' % (name, id(self))
        repo.vfs.write(self._filename, repo.vfs.tryread('dirstate'))
        self._active = True   # backup still pending (neither closed nor aborted)
        self._closed = False  # close() already succeeded

    def __del__(self):
        if self._active: # still active
            # this may occur, even if this class is used correctly:
            # for example, releasing other resources like transaction
            # may raise exception before ``dirstateguard.release`` in
            # ``release(tr, ....)``.
            self._abort()

    def close(self):
        # success path: discard the backup file
        if not self._active: # already inactivated
            msg = (_("can't close already inactivated backup: %s")
                   % self._filename)
            raise util.Abort(msg)

        self._repo.vfs.unlink(self._filename)
        self._active = False
        self._closed = True

    def _abort(self):
        # this "invalidate()" prevents "wlock.release()" from writing
        # changes of dirstate out after restoring to original status
        self._repo.dirstate.invalidate()

        self._repo.vfs.rename(self._filename, 'dirstate')
        self._active = False

    def release(self):
        # failure-or-finally path: restore the backup unless close() ran
        if not self._closed:
            if not self._active: # already inactivated
                msg = (_("can't release already inactivated backup: %s")
                       % self._filename)
                raise util.Abort(msg)
            self._abort()
@@ -1,1601 +1,1601 b''
1 1 # stuff related specifically to patch manipulation / parsing
2 2 #
3 3 # Copyright 2008 Mark Edgington <edgimar@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 #
8 8 # This code is based on the Mark Edgington's crecord extension.
9 9 # (Itself based on Bryan O'Sullivan's record extension.)
10 10
11 11 from i18n import _
12 12 import patch as patchmod
13 13 import util, encoding
14 14
15 15 import os, re, sys, struct, signal, tempfile, locale, cStringIO
16 16
# This is required for ncurses to display non-ASCII characters in default user
# locale encoding correctly.  --immerrr
locale.setlocale(locale.LC_ALL, '')

# os.name is one of: 'posix', 'nt', 'dos', 'os2', 'mac', or 'ce'
if os.name == 'posix':
    import curses
    import fcntl, termios
else:
    # I have no idea if wcurses works with crecord...
    try:
        import wcurses as curses
    except ImportError:
        # wcurses is not shipped on Windows by default
        pass

# probe the name: a NameError here means neither curses nor wcurses was
# imported above, so the interactive UI cannot run
try:
    curses
except NameError:
    if os.name != 'nt': # Temporary hack to get running on Windows again
        raise util.Abort(
            _('the python curses/wcurses module is not available/installed'))

_origstdout = sys.__stdout__ # used by gethw()
41 41
class patchnode(object):
    """abstract base class for the nodes of a patch graph
    (i.e. patchroot, header, hunk, hunkline)

    subclasses supply the structural primitives (siblings, children,
    parent); nextitem()/previtem() build depth-first navigation on top
    of them.
    """

    def firstchild(self):
        raise NotImplementedError("method must be implemented by subclass")

    def lastchild(self):
        raise NotImplementedError("method must be implemented by subclass")

    def allchildren(self):
        "Return a list of all of the direct children of this node"
        raise NotImplementedError("method must be implemented by subclass")

    def nextsibling(self):
        """
        Return the closest next item of the same type where there are no items
        of different types between the current item and this closest item.
        If no such item exists, return None.
        """
        raise NotImplementedError("method must be implemented by subclass")

    def prevsibling(self):
        """
        Return the closest previous item of the same type where there are no
        items of different types between the current item and this closest
        item.  If no such item exists, return None.
        """
        raise NotImplementedError("method must be implemented by subclass")

    def parentitem(self):
        raise NotImplementedError("method must be implemented by subclass")

    def nextitem(self, constrainlevel=True, skipfolded=True):
        """
        return the item that follows this one in display order, or None.

        if constrainlevel == True, only items of the same type are
        considered (this is equivalent to nextsibling()).  otherwise the
        search descends into children first; with skipfolded == True a
        folded item is treated as opaque and its hidden children are
        jumped over.
        """
        if constrainlevel:
            return self.nextsibling()

        # hunklines have no 'folded' attribute; treat them as unfolded
        folded = getattr(self, 'folded', False)
        if skipfolded and folded:
            # children are hidden: go to the next sibling, or to the
            # parent's next sibling when this was the last child
            sib = self.nextsibling()
            if sib is not None:
                return sib
            try:
                return self.parentitem().nextsibling()
            except AttributeError: # no parent
                return None

        # depth-first: first child, then next sibling, then the next
        # sibling of an ancestor
        child = self.firstchild()
        if child is not None:
            return child
        sib = self.nextsibling()
        if sib is not None:
            return sib
        try:
            parentsib = self.parentitem().nextsibling()
            if parentsib is not None:
                return parentsib
            return self.parentitem().parentitem().nextsibling()
        except AttributeError: # parent and/or grandparent was None
            return None

    def previtem(self, constrainlevel=True, skipfolded=True):
        """
        return the item that precedes this one in display order, or None.

        if constrainlevel == True, only items of the same type are
        considered (this is equivalent to prevsibling()).  otherwise the
        result is the deepest visible descendant of the previous
        sibling, or the parent when there is no previous sibling.
        """
        if constrainlevel:
            return self.prevsibling()

        prev = self.prevsibling()
        if prev is None:
            # nothing above us on this level: the parent (or None) is
            # the previous item
            return self.parentitem()

        # walk down the previous sibling's chain of last children, but
        # never into a folded item
        lastkid = prev.lastchild()
        if lastkid is None or prev.folded:
            return prev
        grandkid = lastkid.lastchild()
        if grandkid is None or lastkid.folded:
            return lastkid
        return grandkid
170 170
class patch(patchnode, list): # todo: rename patchroot
    """the root of the patch graph: an ordered list of header objects.

    each header gets a back-reference to this container so sibling
    lookups can be performed from any header.
    """
    def __init__(self, headerlist):
        for header in headerlist:
            self.append(header)
            # add parent patch object reference to each header
            header.patch = self
181 181
class uiheader(patchnode):
    """patch header (one changed file) decorated with curses-UI state:
    applied/partial flags, folding, and sibling navigation via the
    enclosing patch.

    xxx shouldn't we move this to mercurial/patch.py ?
    """

    def __init__(self, header):
        self.nonuiheader = header
        # flag to indicate whether to apply this chunk
        self.applied = True
        # flag which only affects the status display indicating if a node's
        # children are partially applied (i.e. some applied, some not).
        self.partial = False

        # flag to indicate whether to display as folded/unfolded to user
        self.folded = True

        # list of all headers in patch (set by the enclosing patch object)
        self.patch = None

        # flag is False if this header was ever unfolded from initial state
        self.neverunfolded = True
        # wrap the plain hunks so they carry UI state too; hunk.hunks is
        # resolved through __getattr__ on the wrapped header
        self.hunks = [uihunk(h, self) for h in self.hunks]

    def prettystr(self):
        buf = cStringIO.StringIO()
        self.pretty(buf)
        return buf.getvalue()

    def nextsibling(self):
        myindex = self.patch.index(self)
        if myindex < len(self.patch) - 1:
            return self.patch[myindex + 1]
        return None

    def prevsibling(self):
        myindex = self.patch.index(self)
        if myindex > 0:
            return self.patch[myindex - 1]
        return None

    def parentitem(self):
        """
        there is no 'real' parent item of a header that can be selected,
        so return None.
        """
        return None

    def firstchild(self):
        "return the first child of this item, if one exists. otherwise None."
        if self.hunks:
            return self.hunks[0]
        return None

    def lastchild(self):
        "return the last child of this item, if one exists. otherwise None."
        if self.hunks:
            return self.hunks[-1]
        return None

    def allchildren(self):
        "return a list of all of the direct children of this node"
        return self.hunks

    def __getattr__(self, name):
        # everything not defined here is served by the wrapped header
        return getattr(self.nonuiheader, name)
257 257
class uihunkline(patchnode):
    "represents a changed line in a hunk"
    def __init__(self, linetext, hunk):
        self.linetext = linetext
        self.applied = True
        # the parent hunk to which this line belongs
        self.hunk = hunk
        # lines are never folded in practice, but previtem() expects
        # every node to carry the attribute
        self.folded = False

    def prettystr(self):
        return self.linetext

    def nextsibling(self):
        siblings = self.hunk.changedlines
        myindex = siblings.index(self)
        if myindex < len(siblings) - 1:
            return siblings[myindex + 1]
        return None

    def prevsibling(self):
        siblings = self.hunk.changedlines
        myindex = siblings.index(self)
        if myindex > 0:
            return siblings[myindex - 1]
        return None

    def parentitem(self):
        "return the parent to the current item"
        return self.hunk

    def firstchild(self):
        "return the first child of this item, if one exists. otherwise None."
        # hunk-lines don't have children
        return None

    def lastchild(self):
        "return the last child of this item, if one exists. otherwise None."
        # hunk-lines don't have children
        return None
303 303
class uihunk(patchnode):
    """ui patch hunk: wraps a patch.hunk and tracks per-hunk UI state
    (applied/partial flags, folding), and can re-emit itself as unified
    diff text reflecting which lines the user kept."""
    maxcontext = 3

    def __init__(self, hunk, header):
        self._hunk = hunk
        self.changedlines = [uihunkline(line, self) for line in hunk.hunk]
        self.header = header
        # used at end for detecting how many removed lines were un-applied
        # (self.removed resolves through __getattr__ on the wrapped hunk)
        self.originalremoved = self.removed

        # flag to indicate whether to display as folded/unfolded to user
        self.folded = True
        # flag to indicate whether to apply this chunk
        self.applied = True
        # flag which only affects the status display indicating if a node's
        # children are partially applied (i.e. some applied, some not).
        self.partial = False

    def nextsibling(self):
        """return the next hunk of the same header, or None."""
        numhunksinheader = len(self.header.hunks)
        indexofthishunk = self.header.hunks.index(self)

        if (indexofthishunk < numhunksinheader - 1):
            nexthunk = self.header.hunks[indexofthishunk + 1]
            return nexthunk
        else:
            return None

    def prevsibling(self):
        """return the previous hunk of the same header, or None."""
        indexofthishunk = self.header.hunks.index(self)
        if indexofthishunk > 0:
            previoushunk = self.header.hunks[indexofthishunk - 1]
            return previoushunk
        else:
            return None

    def parentitem(self):
        "return the parent to the current item"
        return self.header

    def firstchild(self):
        "return the first child of this item, if one exists. otherwise None."
        if len(self.changedlines) > 0:
            return self.changedlines[0]
        else:
            return None

    def lastchild(self):
        "return the last child of this item, if one exists. otherwise None."
        if len(self.changedlines) > 0:
            return self.changedlines[-1]
        else:
            return None

    def allchildren(self):
        "return a list of all of the direct children of this node"
        return self.changedlines

    def countchanges(self):
        """changedlines -> (n+,n-): count only the lines the user kept"""
        add = len([l for l in self.changedlines if l.applied
                   and l.prettystr()[0] == '+'])
        rem = len([l for l in self.changedlines if l.applied
                   and l.prettystr()[0] == '-'])
        return add, rem

    def getfromtoline(self):
        """build the '@@ -a,b +c,d @@' line for the filtered hunk"""
        # calculate the number of removed lines converted to context lines
        removedconvertedtocontext = self.originalremoved - self.removed

        contextlen = (len(self.before) + len(self.after) +
                      removedconvertedtocontext)
        if self.after and self.after[-1] == '\\ no newline at end of file\n':
            contextlen -= 1
        fromlen = contextlen + self.removed
        tolen = contextlen + self.added

        # diffutils manual, section "2.2.2.2 detailed description of unified
        # format": "an empty hunk is considered to end at the line that
        # precedes the hunk."
        #
        # so, if either of hunks is empty, decrease its line start. --immerrr
        # but only do this if fromline > 0, to avoid having, e.g fromline=-1.
        fromline, toline = self.fromline, self.toline
        if fromline != 0:
            if fromlen == 0:
                fromline -= 1
            if tolen == 0:
                toline -= 1

        fromtoline = '@@ -%d,%d +%d,%d @@%s\n' % (
            fromline, fromlen, toline, tolen,
            self.proc and (' ' + self.proc))
        return fromtoline

    def write(self, fp):
        """write the filtered hunk (header line + kept lines) to fp"""
        # updated self.added/removed, which are used by getfromtoline()
        self.added, self.removed = self.countchanges()
        fp.write(self.getfromtoline())

        hunklinelist = []
        # add the following to the list: (1) all applied lines, and
        # (2) all unapplied removal lines (convert these to context lines)
        for changedline in self.changedlines:
            changedlinestr = changedline.prettystr()
            if changedline.applied:
                hunklinelist.append(changedlinestr)
            elif changedlinestr[0] == "-":
                hunklinelist.append(" " + changedlinestr[1:])

        fp.write(''.join(self.before + hunklinelist + self.after))

    pretty = write

    def prettystr(self):
        x = cStringIO.StringIO()
        self.pretty(x)
        return x.getvalue()

    def __getattr__(self, name):
        # everything not defined here is served by the wrapped hunk
        return getattr(self._hunk, name)

    def __repr__(self):
        return '<hunk %r@%d>' % (self.filename(), self.fromline)
427 427
def filterpatch(ui, chunks, chunkselector, operation=None):
    """interactively filter patch chunks into applied-only chunks

    ``operation`` names the command the selection is performed for
    (e.g. 'record'); it is accepted for interface compatibility and is
    not used by this function itself.
    """
    chunks = list(chunks)
    # convert chunks list into structure suitable for displaying/modifying
    # with curses. create a list of headers only.
    headers = [c for c in chunks if isinstance(c, patchmod.header)]

    # if there are no changed files
    if not headers:
        return []
    uiheaders = [uiheader(h) for h in headers]
    # let user choose headers/hunks/lines, and mark their applied flags
    # accordingly
    chunkselector(ui, uiheaders)

    appliedhunklist = []
    for hdr in uiheaders:
        if not hdr.applied:
            continue
        if not (hdr.special() or any(h.applied for h in hdr.hunks)):
            continue
        appliedhunklist.append(hdr)
        fixoffset = 0
        for hnk in hdr.hunks:
            if hnk.applied:
                appliedhunklist.append(hnk)
                # adjust the 'to'-line offset of the hunk to be correct
                # after de-activating some of the other hunks for this file
                if fixoffset:
                    #hnk = copy.copy(hnk) # necessary??
                    hnk.toline += fixoffset
            else:
                fixoffset += hnk.removed - hnk.added

    return appliedhunklist
461 461
def gethw():
    """
    magically get the current height and width of the window (without initscr)

    this is a rip-off of a rip-off - taken from the bpython code.  probing
    the size via an ioctl avoids curses.initscr(), which can leave the
    terminal in a nasty state after exiting.
    """
    winsize = fcntl.ioctl(_origstdout, termios.TIOCGWINSZ, "\000" * 8)
    h, w = struct.unpack("hhhh", winsize)[0:2]
    return h, w
474 474
def chunkselector(ui, headerlist):
    """
    curses interface to get selection of chunks, and mark the applied flags
    of the chosen chunks.
    """
    ui.write(_('starting interactive selection\n'))
    # curses.wrapper handles terminal setup/teardown around the UI loop
    selector = curseschunkselector(headerlist, ui)
    curses.wrapper(selector.main)
484 484
485 485 def testdecorator(testfn, f):
486 486 def u(*args, **kwargs):
487 487 return f(testfn, *args, **kwargs)
488 488 return u
489 489
def testchunkselector(testfn, ui, headerlist):
    """
    test interface to get selection of chunks, and mark the applied flags
    of the chosen chunks.

    reads one key command per line from ``testfn`` and replays them
    until the selector reports completion.
    """
    selector = curseschunkselector(headerlist, ui)
    if testfn and os.path.exists(testfn):
        fp = open(testfn)
        commands = [line.rstrip('\n') for line in fp.readlines()]
        fp.close()
        while True:
            if selector.handlekeypressed(commands.pop(0), test=True):
                break
504 504
505 505 class curseschunkselector(object):
    def __init__(self, headerlist, ui):
        """set up selection, folding, color and scrolling state for the
        given list of uiheader objects."""
        # put the headers into a patch object
        self.headerlist = patch(headerlist)

        self.ui = ui

        # list of all chunks, headers interleaved with their hunks
        self.chunklist = []
        for h in headerlist:
            self.chunklist.append(h)
            self.chunklist.extend(h.hunks)

        # dictionary mapping (fgcolor, bgcolor) pairs to the
        # corresponding curses color-pair value.
        self.colorpairs = {}
        # maps custom nicknames of color-pairs to curses color-pair values
        self.colorpairnames = {}

        # the currently selected header, hunk, or hunk-line
        self.currentselecteditem = self.headerlist[0]

        # updated when printing out patch-display -- the 'lines' here are the
        # line positions *in the pad*, not on the screen.
        self.selecteditemstartline = 0
        self.selecteditemendline = None

        # define indentation levels
        self.headerindentnumchars = 0
        self.hunkindentnumchars = 3
        self.hunklineindentnumchars = 6

        # the first line of the pad to print to the screen
        self.firstlineofpadtoprint = 0

        # keeps track of the number of lines in the pad
        self.numpadlines = None

        # height of the status area at the bottom of the screen
        self.numstatuslines = 2

        # keep a running count of the number of lines printed to the pad
        # (used for determining when the selected item begins/ends)
        self.linesprintedtopadsofar = 0

        # the first line of the pad which is visible on the screen
        # NOTE(review): firstlineofpadtoprint was already set to 0 above;
        # this second assignment is redundant but harmless
        self.firstlineofpadtoprint = 0

        # stores optional text for a commit comment provided by the user
        self.commenttext = ""

        # if the last 'toggle all' command caused all changes to be applied
        self.waslasttoggleallapplied = True
557 557
558 558 def uparrowevent(self):
559 559 """
560 560 try to select the previous item to the current item that has the
561 561 most-indented level. for example, if a hunk is selected, try to select
562 562 the last hunkline of the hunk prior to the selected hunk. or, if
563 563 the first hunkline of a hunk is currently selected, then select the
564 564 hunk itself.
565 565
566 566 if the currently selected item is already at the top of the screen,
567 567 scroll the screen down to show the new-selected item.
568 568
569 569 """
570 570 currentitem = self.currentselecteditem
571 571
572 572 nextitem = currentitem.previtem(constrainlevel=False)
573 573
574 574 if nextitem is None:
575 575 # if no parent item (i.e. currentitem is the first header), then
576 576 # no change...
577 577 nextitem = currentitem
578 578
579 579 self.currentselecteditem = nextitem
580 580
581 581 def uparrowshiftevent(self):
582 582 """
583 583 select (if possible) the previous item on the same level as the
584 584 currently selected item. otherwise, select (if possible) the
585 585 parent-item of the currently selected item.
586 586
587 587 if the currently selected item is already at the top of the screen,
588 588 scroll the screen down to show the new-selected item.
589 589
590 590 """
591 591 currentitem = self.currentselecteditem
592 592 nextitem = currentitem.previtem()
593 593 # if there's no previous item on this level, try choosing the parent
594 594 if nextitem is None:
595 595 nextitem = currentitem.parentitem()
596 596 if nextitem is None:
597 597 # if no parent item (i.e. currentitem is the first header), then
598 598 # no change...
599 599 nextitem = currentitem
600 600
601 601 self.currentselecteditem = nextitem
602 602
603 603 def downarrowevent(self):
604 604 """
605 605 try to select the next item to the current item that has the
606 606 most-indented level. for example, if a hunk is selected, select
607 607 the first hunkline of the selected hunk. or, if the last hunkline of
608 608 a hunk is currently selected, then select the next hunk, if one exists,
609 609 or if not, the next header if one exists.
610 610
611 611 if the currently selected item is already at the bottom of the screen,
612 612 scroll the screen up to show the new-selected item.
613 613
614 614 """
615 615 #self.startprintline += 1 #debug
616 616 currentitem = self.currentselecteditem
617 617
618 618 nextitem = currentitem.nextitem(constrainlevel=False)
619 619 # if there's no next item, keep the selection as-is
620 620 if nextitem is None:
621 621 nextitem = currentitem
622 622
623 623 self.currentselecteditem = nextitem
624 624
625 625 def downarrowshiftevent(self):
626 626 """
627 627 if the cursor is already at the bottom chunk, scroll the screen up and
628 628 move the cursor-position to the subsequent chunk. otherwise, only move
629 629 the cursor position down one chunk.
630 630
631 631 """
632 632 # todo: update docstring
633 633
634 634 currentitem = self.currentselecteditem
635 635 nextitem = currentitem.nextitem()
636 636 # if there's no previous item on this level, try choosing the parent's
637 637 # nextitem.
638 638 if nextitem is None:
639 639 try:
640 640 nextitem = currentitem.parentitem().nextitem()
641 641 except AttributeError:
642 642 # parentitem returned None, so nextitem() can't be called
643 643 nextitem = None
644 644 if nextitem is None:
645 645 # if no next item on parent-level, then no change...
646 646 nextitem = currentitem
647 647
648 648 self.currentselecteditem = nextitem
649 649
650 650 def rightarrowevent(self):
651 651 """
652 652 select (if possible) the first of this item's child-items.
653 653
654 654 """
655 655 currentitem = self.currentselecteditem
656 656 nextitem = currentitem.firstchild()
657 657
658 658 # turn off folding if we want to show a child-item
659 659 if currentitem.folded:
660 660 self.togglefolded(currentitem)
661 661
662 662 if nextitem is None:
663 663 # if no next item on parent-level, then no change...
664 664 nextitem = currentitem
665 665
666 666 self.currentselecteditem = nextitem
667 667
668 668 def leftarrowevent(self):
669 669 """
670 670 if the current item can be folded (i.e. it is an unfolded header or
671 671 hunk), then fold it. otherwise try select (if possible) the parent
672 672 of this item.
673 673
674 674 """
675 675 currentitem = self.currentselecteditem
676 676
677 677 # try to fold the item
678 678 if not isinstance(currentitem, uihunkline):
679 679 if not currentitem.folded:
680 680 self.togglefolded(item=currentitem)
681 681 return
682 682
683 683 # if it can't be folded, try to select the parent item
684 684 nextitem = currentitem.parentitem()
685 685
686 686 if nextitem is None:
687 687 # if no item on parent-level, then no change...
688 688 nextitem = currentitem
689 689 if not nextitem.folded:
690 690 self.togglefolded(item=nextitem)
691 691
692 692 self.currentselecteditem = nextitem
693 693
694 694 def leftarrowshiftevent(self):
695 695 """
696 696 select the header of the current item (or fold current item if the
697 697 current item is already a header).
698 698
699 699 """
700 700 currentitem = self.currentselecteditem
701 701
702 702 if isinstance(currentitem, uiheader):
703 703 if not currentitem.folded:
704 704 self.togglefolded(item=currentitem)
705 705 return
706 706
707 707 # select the parent item recursively until we're at a header
708 708 while True:
709 709 nextitem = currentitem.parentitem()
710 710 if nextitem is None:
711 711 break
712 712 else:
713 713 currentitem = nextitem
714 714
715 715 self.currentselecteditem = currentitem
716 716
717 717 def updatescroll(self):
718 718 "scroll the screen to fully show the currently-selected"
719 719 selstart = self.selecteditemstartline
720 720 selend = self.selecteditemendline
721 721 #selnumlines = selend - selstart
722 722 padstart = self.firstlineofpadtoprint
723 723 padend = padstart + self.yscreensize - self.numstatuslines - 1
724 724 # 'buffered' pad start/end values which scroll with a certain
725 725 # top/bottom context margin
726 726 padstartbuffered = padstart + 3
727 727 padendbuffered = padend - 3
728 728
729 729 if selend > padendbuffered:
730 730 self.scrolllines(selend - padendbuffered)
731 731 elif selstart < padstartbuffered:
732 732 # negative values scroll in pgup direction
733 733 self.scrolllines(selstart - padstartbuffered)
734 734
735 735
736 736 def scrolllines(self, numlines):
737 737 "scroll the screen up (down) by numlines when numlines >0 (<0)."
738 738 self.firstlineofpadtoprint += numlines
739 739 if self.firstlineofpadtoprint < 0:
740 740 self.firstlineofpadtoprint = 0
741 741 if self.firstlineofpadtoprint > self.numpadlines - 1:
742 742 self.firstlineofpadtoprint = self.numpadlines - 1
743 743
    def toggleapply(self, item=None):
        """
        toggle the applied flag of the specified item. if no item is specified,
        toggle the flag of the currently selected item.

        toggling a header propagates down to all of its hunks and their
        lines; toggling a hunk or a line re-derives the applied/partial
        flags of its ancestors from the new sibling state.
        """
        if item is None:
            item = self.currentselecteditem

        item.applied = not item.applied

        if isinstance(item, uiheader):
            item.partial = False
            if item.applied:
                # apply all its hunks
                for hnk in item.hunks:
                    hnk.applied = True
                    # apply all their hunklines
                    for hunkline in hnk.changedlines:
                        hunkline.applied = True
            else:
                # un-apply all its hunks
                for hnk in item.hunks:
                    hnk.applied = False
                    hnk.partial = False
                    # un-apply all their hunklines
                    for hunkline in hnk.changedlines:
                        hunkline.applied = False
        elif isinstance(item, uihunk):
            item.partial = False
            # apply all of its hunklines
            for hunkline in item.changedlines:
                hunkline.applied = item.applied

            siblingappliedstatus = [hnk.applied for hnk in item.header.hunks]
            allsiblingsapplied = not (False in siblingappliedstatus)
            nosiblingsapplied = not (True in siblingappliedstatus)

            siblingspartialstatus = [hnk.partial for hnk in item.header.hunks]
            somesiblingspartial = (True in siblingspartialstatus)

            #cases where applied or partial should be removed from header

            # if no 'sibling' hunks are applied (including this hunk)
            if nosiblingsapplied:
                if not item.header.special():
                    item.header.applied = False
                    item.header.partial = False
            else: # some/all parent siblings are applied
                item.header.applied = True
                item.header.partial = (somesiblingspartial or
                                       not allsiblingsapplied)

        elif isinstance(item, uihunkline):
            siblingappliedstatus = [ln.applied for ln in item.hunk.changedlines]
            allsiblingsapplied = not (False in siblingappliedstatus)
            nosiblingsapplied = not (True in siblingappliedstatus)

            # if no 'sibling' lines are applied
            if nosiblingsapplied:
                item.hunk.applied = False
                item.hunk.partial = False
            elif allsiblingsapplied:
                item.hunk.applied = True
                item.hunk.partial = False
            else: # some siblings applied
                item.hunk.applied = True
                item.hunk.partial = True

            parentsiblingsapplied = [hnk.applied for hnk
                                     in item.hunk.header.hunks]
            noparentsiblingsapplied = not (True in parentsiblingsapplied)
            allparentsiblingsapplied = not (False in parentsiblingsapplied)

            parentsiblingspartial = [hnk.partial for hnk
                                     in item.hunk.header.hunks]
            someparentsiblingspartial = (True in parentsiblingspartial)

            # if all parent hunks are not applied, un-apply header
            if noparentsiblingsapplied:
                if not item.hunk.header.special():
                    item.hunk.header.applied = False
                    item.hunk.header.partial = False
            # set the applied and partial status of the header if needed
            else: # some/all parent siblings are applied
                item.hunk.header.applied = True
                item.hunk.header.partial = (someparentsiblingspartial or
                                            not allparentsiblingsapplied)
832 832
833 833 def toggleall(self):
834 834 "toggle the applied flag of all items."
835 835 if self.waslasttoggleallapplied: # then unapply them this time
836 836 for item in self.headerlist:
837 837 if item.applied:
838 838 self.toggleapply(item)
839 839 else:
840 840 for item in self.headerlist:
841 841 if not item.applied:
842 842 self.toggleapply(item)
843 843 self.waslasttoggleallapplied = not self.waslasttoggleallapplied
844 844
845 845 def togglefolded(self, item=None, foldparent=False):
846 846 "toggle folded flag of specified item (defaults to currently selected)"
847 847 if item is None:
848 848 item = self.currentselecteditem
849 849 if foldparent or (isinstance(item, uiheader) and item.neverunfolded):
850 850 if not isinstance(item, uiheader):
851 851 # we need to select the parent item in this case
852 852 self.currentselecteditem = item = item.parentitem()
853 853 elif item.neverunfolded:
854 854 item.neverunfolded = False
855 855
856 856 # also fold any foldable children of the parent/current item
857 857 if isinstance(item, uiheader): # the original or 'new' item
858 858 for child in item.allchildren():
859 859 child.folded = not item.folded
860 860
861 861 if isinstance(item, (uiheader, uihunk)):
862 862 item.folded = not item.folded
863 863
864 864
865 865 def alignstring(self, instr, window):
866 866 """
867 867 add whitespace to the end of a string in order to make it fill
868 868 the screen in the x direction. the current cursor position is
869 869 taken into account when making this calculation. the string can span
870 870 multiple lines.
871 871
872 872 """
873 873 y, xstart = window.getyx()
874 874 width = self.xscreensize
875 875 # turn tabs into spaces
876 876 instr = instr.expandtabs(4)
877 877 strwidth = encoding.colwidth(instr)
878 878 numspaces = (width - ((strwidth + xstart) % width) - 1)
879 879 return instr + " " * numspaces + "\n"
880 880
    def printstring(self, window, text, fgcolor=None, bgcolor=None, pair=None,
        pairname=None, attrlist=None, towin=True, align=True, showwhtspc=False):
        """
        print the string, text, with the specified colors and attributes, to
        the specified curses window object.

        the foreground and background colors are of the form
        curses.color_xxxx, where xxxx is one of: [black, blue, cyan, green,
        magenta, red, white, yellow]. if pairname is provided, a color
        pair will be looked up in the self.colorpairnames dictionary.

        attrlist is a list containing text attributes in the form of
        curses.a_xxxx, where xxxx can be: [bold, dim, normal, standout,
        underline].

        if align == True, whitespace is added to the printed string such that
        the string stretches to the right border of the window.

        if showwhtspc == True, trailing whitespace of a string is highlighted.

        returns the string that was (or would have been) written, including
        any added padding, so callers can count lines even when towin=False.
        """
        # preprocess the text, converting tabs to spaces
        text = text.expandtabs(4)
        # strip \n, and convert control characters to ^[char] representation
        text = re.sub(r'[\x00-\x08\x0a-\x1f]',
                lambda m:'^' + chr(ord(m.group()) + 64), text.strip('\n'))

        # resolve the curses color pair: explicit pair > named pair > colors
        if pair is not None:
            colorpair = pair
        elif pairname is not None:
            colorpair = self.colorpairnames[pairname]
        else:
            if fgcolor is None:
                fgcolor = -1
            if bgcolor is None:
                bgcolor = -1
            if (fgcolor, bgcolor) in self.colorpairs:
                colorpair = self.colorpairs[(fgcolor, bgcolor)]
            else:
                colorpair = self.getcolorpair(fgcolor, bgcolor)
        # add attributes if possible
        if attrlist is None:
            attrlist = []
        if colorpair < 256:
            # then it is safe to apply all attributes
            for textattr in attrlist:
                colorpair |= textattr
        else:
            # just apply a select few (safe?) attributes
            for textattr in (curses.A_UNDERLINE, curses.A_BOLD):
                if textattr in attrlist:
                    colorpair |= textattr

        y, xstart = self.chunkpad.getyx()
        t = "" # variable for counting lines printed
        # if requested, show trailing whitespace
        if showwhtspc:
            origlen = len(text)
            text = text.rstrip(' \n') # tabs have already been expanded
            strippedlen = len(text)
            numtrailingspaces = origlen - strippedlen

        if towin:
            window.addstr(text, colorpair)
        t += text

        # render the stripped trailing whitespace as reverse-video blocks
        if showwhtspc:
            wscolorpair = colorpair | curses.A_REVERSE
            if towin:
                for i in range(numtrailingspaces):
                    window.addch(curses.ACS_CKBOARD, wscolorpair)
            t += " " * numtrailingspaces

        if align:
            if towin:
                extrawhitespace = self.alignstring("", window)
                window.addstr(extrawhitespace, colorpair)
            else:
                # need to use t, since the x position hasn't incremented
                extrawhitespace = self.alignstring(t, window)
                t += extrawhitespace

        # self.linesprintedtopadsofar is reset to 0 at the beginning of
        # printitem()
        linesprinted = (xstart + len(t)) / self.xscreensize
        self.linesprintedtopadsofar += linesprinted
        return t
968 968
    def updatescreen(self):
        """redraw the whole UI: the legend/status lines on top, then the
        patch in the chunk pad below"""
        self.statuswin.erase()
        self.chunkpad.erase()

        printstring = self.printstring

        # print out the status lines at the top
        try:
            printstring(self.statuswin,
                        "SELECT CHUNKS: (j/k/up/dn/pgup/pgdn) move cursor; "
                        "(space/A) toggle hunk/all; (e)dit hunk;",
                        pairname="legend")
            printstring(self.statuswin,
                        " (f)old/unfold; (c)onfirm applied; (q)uit; (?) help "
                        "| [X]=hunk applied **=folded",
                        pairname="legend")
        except curses.error:
            # NOTE(review): errors are swallowed, presumably so a too-small
            # terminal degrades gracefully instead of crashing -- confirm
            pass

        # print out the patch in the remaining part of the window
        try:
            self.printitem()
            self.updatescroll()
            self.chunkpad.refresh(self.firstlineofpadtoprint, 0,
                                  self.numstatuslines, 0,
                                  self.yscreensize + 1 - self.numstatuslines,
                                  self.xscreensize)
        except curses.error:
            pass

        # refresh([pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol])
        self.statuswin.refresh()
1001 1001
1002 1002 def getstatusprefixstring(self, item):
1003 1003 """
1004 1004 create a string to prefix a line with which indicates whether 'item'
1005 1005 is applied and/or folded.
1006 1006
1007 1007 """
1008 1008 # create checkbox string
1009 1009 if item.applied:
1010 1010 if not isinstance(item, uihunkline) and item.partial:
1011 1011 checkbox = "[~]"
1012 1012 else:
1013 1013 checkbox = "[x]"
1014 1014 else:
1015 1015 checkbox = "[ ]"
1016 1016
1017 1017 try:
1018 1018 if item.folded:
1019 1019 checkbox += "**"
1020 1020 if isinstance(item, uiheader):
1021 1021 # one of "m", "a", or "d" (modified, added, deleted)
1022 1022 filestatus = item.changetype
1023 1023
1024 1024 checkbox += filestatus + " "
1025 1025 else:
1026 1026 checkbox += " "
1027 1027 if isinstance(item, uiheader):
1028 1028 # add two more spaces for headers
1029 1029 checkbox += " "
1030 1030 except AttributeError: # not foldable
1031 1031 checkbox += " "
1032 1032
1033 1033 return checkbox
1034 1034
    def printheader(self, header, selected=False, towin=True,
                    ignorefolding=False):
        """
        print the file header to the chunk pad and return the printed text.

        when towin is False nothing is drawn; the return string is still
        built so callers can count the lines that would be printed. a
        folded header is collapsed to a single filename line unless
        ignorefolding is True.
        """
        outstr = ""
        text = header.prettystr()
        chunkindex = self.chunklist.index(header)

        if chunkindex != 0 and not header.folded:
            # add separating line before headers
            outstr += self.printstring(self.chunkpad, '_' * self.xscreensize,
                                       towin=towin, align=False)
        # select color-pair based on if the header is selected
        colorpair = self.getcolorpair(name=selected and "selected" or "normal",
                                      attrlist=[curses.A_BOLD])

        # print out each line of the chunk, expanding it to screen width

        # number of characters to indent lines on this level by
        indentnumchars = 0
        checkbox = self.getstatusprefixstring(header)
        if not header.folded or ignorefolding:
            textlist = text.split("\n")
            linestr = checkbox + textlist[0]
        else:
            linestr = checkbox + header.filename()
        outstr += self.printstring(self.chunkpad, linestr, pair=colorpair,
                                   towin=towin)
        if not header.folded or ignorefolding:
            if len(textlist) > 1:
                # continuation lines are indented past the checkbox column
                for line in textlist[1:]:
                    linestr = " "*(indentnumchars + len(checkbox)) + line
                    outstr += self.printstring(self.chunkpad, linestr,
                                               pair=colorpair, towin=towin)

        return outstr
1074 1074
    def printhunklinesbefore(self, hunk, selected=False, towin=True,
                             ignorefolding=False):
        """print a hunk's from-to (@@) line and its leading context lines;
        includes start/end line indicator. returns the printed text."""
        outstr = ""
        # where hunk is in list of siblings
        hunkindex = hunk.header.hunks.index(hunk)

        if hunkindex != 0:
            # add separating line before headers
            outstr += self.printstring(self.chunkpad, ' '*self.xscreensize,
                                       towin=towin, align=False)

        colorpair = self.getcolorpair(name=selected and "selected" or "normal",
                                      attrlist=[curses.A_BOLD])

        # print out from-to line with checkbox
        checkbox = self.getstatusprefixstring(hunk)

        lineprefix = " "*self.hunkindentnumchars + checkbox
        frtoline = " " + hunk.getfromtoline().strip("\n")


        outstr += self.printstring(self.chunkpad, lineprefix, towin=towin,
                                   align=False) # add uncolored checkbox/indent
        outstr += self.printstring(self.chunkpad, frtoline, pair=colorpair,
                                   towin=towin)

        if hunk.folded and not ignorefolding:
            # skip remainder of output
            return outstr

        # print out lines of the chunk preceeding changed-lines
        for line in hunk.before:
            linestr = " "*(self.hunklineindentnumchars + len(checkbox)) + line
            outstr += self.printstring(self.chunkpad, linestr, towin=towin)

        return outstr
1112 1112
1113 1113 def printhunklinesafter(self, hunk, towin=True, ignorefolding=False):
1114 1114 outstr = ""
1115 1115 if hunk.folded and not ignorefolding:
1116 1116 return outstr
1117 1117
1118 1118 # a bit superfluous, but to avoid hard-coding indent amount
1119 1119 checkbox = self.getstatusprefixstring(hunk)
1120 1120 for line in hunk.after:
1121 1121 linestr = " "*(self.hunklineindentnumchars + len(checkbox)) + line
1122 1122 outstr += self.printstring(self.chunkpad, linestr, towin=towin)
1123 1123
1124 1124 return outstr
1125 1125
1126 1126 def printhunkchangedline(self, hunkline, selected=False, towin=True):
1127 1127 outstr = ""
1128 1128 checkbox = self.getstatusprefixstring(hunkline)
1129 1129
1130 1130 linestr = hunkline.prettystr().strip("\n")
1131 1131
1132 1132 # select color-pair based on whether line is an addition/removal
1133 1133 if selected:
1134 1134 colorpair = self.getcolorpair(name="selected")
1135 1135 elif linestr.startswith("+"):
1136 1136 colorpair = self.getcolorpair(name="addition")
1137 1137 elif linestr.startswith("-"):
1138 1138 colorpair = self.getcolorpair(name="deletion")
1139 1139 elif linestr.startswith("\\"):
1140 1140 colorpair = self.getcolorpair(name="normal")
1141 1141
1142 1142 lineprefix = " "*self.hunklineindentnumchars + checkbox
1143 1143 outstr += self.printstring(self.chunkpad, lineprefix, towin=towin,
1144 1144 align=False) # add uncolored checkbox/indent
1145 1145 outstr += self.printstring(self.chunkpad, linestr, pair=colorpair,
1146 1146 towin=towin, showwhtspc=True)
1147 1147 return outstr
1148 1148
1149 1149 def printitem(self, item=None, ignorefolding=False, recursechildren=True,
1150 1150 towin=True):
1151 1151 """
1152 1152 use __printitem() to print the the specified item.applied.
1153 1153 if item is not specified, then print the entire patch.
1154 1154 (hiding folded elements, etc. -- see __printitem() docstring)
1155 1155 """
1156 1156 if item is None:
1157 1157 item = self.headerlist
1158 1158 if recursechildren:
1159 1159 self.linesprintedtopadsofar = 0
1160 1160
1161 1161 outstr = []
1162 1162 self.__printitem(item, ignorefolding, recursechildren, outstr,
1163 1163 towin=towin)
1164 1164 return ''.join(outstr)
1165 1165
1166 1166 def outofdisplayedarea(self):
1167 1167 y, _ = self.chunkpad.getyx() # cursor location
1168 1168 # * 2 here works but an optimization would be the max number of
1169 1169 # consecutive non selectable lines
1170 1170 # i.e the max number of context line for any hunk in the patch
1171 1171 miny = min(0, self.firstlineofpadtoprint - self.yscreensize)
1172 1172 maxy = self.firstlineofpadtoprint + self.yscreensize * 2
1173 1173 return y < miny or y > maxy
1174 1174
1175 1175 def handleselection(self, item, recursechildren):
1176 1176 selected = (item is self.currentselecteditem)
1177 1177 if selected and recursechildren:
1178 1178 # assumes line numbering starting from line 0
1179 1179 self.selecteditemstartline = self.linesprintedtopadsofar
1180 1180 selecteditemlines = self.getnumlinesdisplayed(item,
1181 1181 recursechildren=False)
1182 1182 self.selecteditemendline = (self.selecteditemstartline +
1183 1183 selecteditemlines - 1)
1184 1184 return selected
1185 1185
    def __printitem(self, item, ignorefolding, recursechildren, outstr,
                    towin=True):
        """
        recursive method for printing out patch/header/hunk/hunk-line data to
        screen. appends each printed piece to the outstr list and also
        returns it; the accumulated text does not include coloring, etc.

        if ignorefolding is True, then folded items are printed out.

        if recursechildren is False, then only print the item without its
        child items.
        """
        # when actually drawing, skip anything far outside the visible area
        if towin and self.outofdisplayedarea():
            return

        selected = self.handleselection(item, recursechildren)

        # patch object is a list of headers
        if isinstance(item, patch):
            if recursechildren:
                for hdr in item:
                    self.__printitem(hdr, ignorefolding,
                                     recursechildren, outstr, towin)
        # todo: eliminate all isinstance() calls
        if isinstance(item, uiheader):
            outstr.append(self.printheader(item, selected, towin=towin,
                                       ignorefolding=ignorefolding))
            if recursechildren:
                for hnk in item.hunks:
                    self.__printitem(hnk, ignorefolding,
                                     recursechildren, outstr, towin)
        elif (isinstance(item, uihunk) and
              ((not item.header.folded) or ignorefolding)):
            # print the hunk data which comes before the changed-lines
            outstr.append(self.printhunklinesbefore(item, selected, towin=towin,
                                                ignorefolding=ignorefolding))
            if recursechildren:
                for l in item.changedlines:
                    self.__printitem(l, ignorefolding,
                                     recursechildren, outstr, towin)
            outstr.append(self.printhunklinesafter(item, towin=towin,
                                                ignorefolding=ignorefolding))
        elif (isinstance(item, uihunkline) and
              ((not item.hunk.folded) or ignorefolding)):
            outstr.append(self.printhunkchangedline(item, selected,
                                                    towin=towin))

        return outstr
1235 1235
1236 1236 def getnumlinesdisplayed(self, item=None, ignorefolding=False,
1237 1237 recursechildren=True):
1238 1238 """
1239 1239 return the number of lines which would be displayed if the item were
1240 1240 to be printed to the display. the item will not be printed to the
1241 1241 display (pad).
1242 1242 if no item is given, assume the entire patch.
1243 1243 if ignorefolding is True, folded items will be unfolded when counting
1244 1244 the number of lines.
1245 1245
1246 1246 """
1247 1247 # temporarily disable printing to windows by printstring
1248 1248 patchdisplaystring = self.printitem(item, ignorefolding,
1249 1249 recursechildren, towin=False)
1250 1250 numlines = len(patchdisplaystring) / self.xscreensize
1251 1251 return numlines
1252 1252
    def sigwinchhandler(self, n, frame):
        """SIGWINCH handler: re-query the terminal size and rebuild the
        status window and chunk pad to match. n and frame are the standard
        signal-handler arguments and are unused."""
        try:
            # endwin() makes curses pick up the new size on the next refresh
            curses.endwin()
            self.yscreensize, self.xscreensize = gethw()
            self.statuswin.resize(self.numstatuslines, self.xscreensize)
            # +1 to account for the last line's text reaching the row end
            self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1
            self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
            # todo: try to resize commit message window if possible
        except curses.error:
            pass
1264 1264
    def getcolorpair(self, fgcolor=None, bgcolor=None, name=None,
                     attrlist=None):
        """
        get a curses color pair, adding it to self.colorpairs if it is not
        already defined. an optional string, name, can be passed as a shortcut
        for referring to the color-pair. by default, if no arguments are
        specified, the white foreground / black background color-pair is
        returned.

        it is expected that this function will be used exclusively for
        initializing color pairs, and not curses.init_pair().

        attrlist is used to 'flavor' the returned color-pair. this information
        is not stored in self.colorpairs. it contains attribute values like
        curses.A_BOLD.
        """
        if (name is not None) and name in self.colorpairnames:
            # then get the associated color pair and return it
            colorpair = self.colorpairnames[name]
        else:
            # -1 is the curses "default color" sentinel
            if fgcolor is None:
                fgcolor = -1
            if bgcolor is None:
                bgcolor = -1
            if (fgcolor, bgcolor) in self.colorpairs:
                colorpair = self.colorpairs[(fgcolor, bgcolor)]
            else:
                # register a brand-new pair with curses and cache it
                pairindex = len(self.colorpairs) + 1
                curses.init_pair(pairindex, fgcolor, bgcolor)
                colorpair = self.colorpairs[(fgcolor, bgcolor)] = (
                    curses.color_pair(pairindex))
                if name is not None:
                    self.colorpairnames[name] = curses.color_pair(pairindex)

        # add attributes if possible
        if attrlist is None:
            attrlist = []
        if colorpair < 256:
            # then it is safe to apply all attributes
            for textattr in attrlist:
                colorpair |= textattr
        else:
            # just apply a select few (safe?) attributes
            for textattrib in (curses.A_UNDERLINE, curses.A_BOLD):
                if textattrib in attrlist:
                    colorpair |= textattrib
        return colorpair
1313 1313
    def initcolorpair(self, *args, **kwargs):
        """register a color pair; same as getcolorpair, return value unused."""
        self.getcolorpair(*args, **kwargs)
1317 1317
    def helpwindow(self):
        "print a help window to the screen. exit after any keypress."
        # NOTE(review): some bindings listed below duplicate lowercase keys
        # (pgup/pgdn [k/j], shift-left [h], fold-parent f) -- the dispatch in
        # handlekeypressed should be kept in sync with this text
        helptext = """            [press any key to return to the patch-display]

crecord allows you to interactively choose among the changes you have made,
and confirm only those changes you select for further processing by the command
you are running (commit/shelve/revert), after confirming the selected
changes, the unselected changes are still present in your working copy, so you
can use crecord multiple times to split large changes into smaller changesets.
the following are valid keystrokes:

                [space] : (un-)select item ([~]/[x] = partly/fully applied)
                      a : (un-)select all items
    up/down-arrow [k/j] : go to previous/next unfolded item
        pgup/pgdn [k/j] : go to previous/next item of same type
 right/left-arrow [l/h] : go to child item / parent item
 shift-left-arrow   [h] : go to parent header / fold selected header
                      f : fold / unfold item, hiding/revealing its children
                      f : fold / unfold parent item and all of its ancestors
                      m : edit / resume editing the commit message
                      e : edit the currently selected hunk
                      a : toggle amend mode (hg rev >= 2.2)
                      c : confirm selected changes
                      r : review/edit and confirm selected changes
                      q : quit without confirming (no changes will be made)
                      ? : help (what you're currently reading)"""

        helpwin = curses.newwin(self.yscreensize, 0, 0, 0)
        helplines = helptext.split("\n")
        # pad with blank lines so the legend color fills the whole window
        helplines = helplines + [" "]*(
            self.yscreensize - self.numstatuslines - len(helplines) - 1)
        try:
            for line in helplines:
                self.printstring(helpwin, line, pairname="legend")
        except curses.error:
            pass
        helpwin.refresh()
        try:
            # block until any key is pressed
            helpwin.getkey()
        except curses.error:
            pass
1359 1359
1360 1360 def confirmationwindow(self, windowtext):
1361 1361 "display an informational window, then wait for and return a keypress."
1362 1362
1363 1363 confirmwin = curses.newwin(self.yscreensize, 0, 0, 0)
1364 1364 try:
1365 1365 lines = windowtext.split("\n")
1366 1366 for line in lines:
1367 1367 self.printstring(confirmwin, line, pairname="selected")
1368 1368 except curses.error:
1369 1369 pass
1370 1370 self.stdscr.refresh()
1371 1371 confirmwin.refresh()
1372 1372 try:
1373 1373 response = chr(self.stdscr.getch())
1374 1374 except ValueError:
1375 1375 response = None
1376 1376
1377 1377 return response
1378 1378
1379 1379 def confirmcommit(self, review=False):
1380 1380 """ask for 'y' to be pressed to confirm selected. return True if
1381 1381 confirmed."""
1382 1382 if review:
1383 1383 confirmtext = (
1384 1384 """if you answer yes to the following, the your currently chosen patch chunks
1385 1385 will be loaded into an editor. you may modify the patch from the editor, and
1386 1386 save the changes if you wish to change the patch. otherwise, you can just
1387 1387 close the editor without saving to accept the current patch as-is.
1388 1388
1389 1389 note: don't add/remove lines unless you also modify the range information.
1390 1390 failing to follow this rule will result in the commit aborting.
1391 1391
1392 1392 are you sure you want to review/edit and confirm the selected changes [yn]?
1393 1393 """)
1394 1394 else:
1395 1395 confirmtext = (
1396 1396 "are you sure you want to confirm the selected changes [yn]? ")
1397 1397
1398 1398 response = self.confirmationwindow(confirmtext)
1399 1399 if response is None:
1400 1400 response = "n"
1401 1401 if response.lower().startswith("y"):
1402 1402 return True
1403 1403 else:
1404 1404 return False
1405 1405
    def recenterdisplayedarea(self):
        """
        scroll so the selected item is back inside the displayed zone.

        after paging with pgup/pgdn the selection can point outside the
        display zone; printing the patch with towin=False recomputes the
        selected item's location (even though it is off-screen) and then
        updatescroll() brings it back into view.
        """
        self.printitem(towin=False)
        self.updatescroll()
1415 1415
    def toggleedit(self, item=None, test=False):
        """
        edit the currently selected hunk in an external editor and splice
        the resulting hunks back into the patch in its place
        """

        def editpatchwitheditor(self, chunk):
            # note: defined as a nested helper but called as a plain
            # function with self passed explicitly
            if chunk is None:
                self.ui.write(_('cannot edit patch for whole file'))
                self.ui.write("\n")
                return None
            if chunk.header.binary():
                self.ui.write(_('cannot edit patch for binary file'))
                self.ui.write("\n")
                return None
            # patch comment based on the git one (based on comment at end of
            # http://mercurial.selenic.com/wiki/recordextension)
            phelp = '---' + _("""
to remove '-' lines, make them ' ' lines (context).
to remove '+' lines, delete them.
lines starting with # will be removed from the patch.

if the patch applies cleanly, the edited hunk will immediately be
added to the record list. if it does not apply cleanly, a rejects
file will be generated: you can use that when you try again. if
all lines of the hunk are removed, then the edit is aborted and
the hunk is left unchanged.
""")
            (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-",
                    suffix=".diff", text=True)
            ncpatchfp = None
            try:
                # write the initial patch
                f = os.fdopen(patchfd, "w")
                chunk.header.write(f)
                chunk.write(f)
                f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
                f.close()
                # start the editor and wait for it to complete
                editor = self.ui.geteditor()
                self.ui.system("%s \"%s\"" % (editor, patchfn),
                          environ={'hguser': self.ui.username()},
                          onerr=util.Abort, errprefix=_("edit failed"))
                # remove comment lines
                patchfp = open(patchfn)
                ncpatchfp = cStringIO.StringIO()
                for line in patchfp:
                    if not line.startswith('#'):
                        ncpatchfp.write(line)
                patchfp.close()
                ncpatchfp.seek(0)
                newpatches = patchmod.parsepatch(ncpatchfp)
            finally:
                os.unlink(patchfn)
                del ncpatchfp
            return newpatches
        if item is None:
            item = self.currentselecteditem
        if isinstance(item, uiheader):
            # whole-file headers are not editable
            return
        if isinstance(item, uihunkline):
            # edit the enclosing hunk when a single line is selected
            item = item.parentitem()
        if not isinstance(item, uihunk):
            return

        beforeadded, beforeremoved = item.added, item.removed
        newpatches = editpatchwitheditor(self, item)
        header = item.header
        editedhunkindex = header.hunks.index(item)
        hunksbefore = header.hunks[:editedhunkindex]
        hunksafter = header.hunks[editedhunkindex + 1:]
        newpatchheader = newpatches[0]
        newhunks = [uihunk(h, header) for h in newpatchheader.hunks]
        newadded = sum([h.added for h in newhunks])
        newremoved = sum([h.removed for h in newhunks])
        offset = (newadded - beforeadded) - (newremoved - beforeremoved)

        # shift the start lines of the hunks that follow the edited one by
        # the net line-count change the edit introduced
        for h in hunksafter:
            h.toline += offset
        for h in newhunks:
            h.folded = False
        header.hunks = hunksbefore + newhunks + hunksafter
        if self.emptypatch():
            # the edit removed every line: keep the original hunk instead
            header.hunks = hunksbefore + [item] + hunksafter
        self.currentselecteditem = header

        if not test:
            # recompute the pad size and redraw everything
            self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1
            self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
            self.updatescroll()
            self.stdscr.refresh()
            self.statuswin.refresh()
            self.stdscr.keypad(1)
1508 1508
1509 1509 def emptypatch(self):
1510 1510 item = self.headerlist
1511 1511 if not item:
1512 1512 return True
1513 1513 for header in item:
1514 1514 if header.hunks:
1515 1515 return False
1516 1516 return True
1517 1517
1518 1518 def handlekeypressed(self, keypressed, test=False):
1519 1519 if keypressed in ["k", "KEY_UP"]:
1520 1520 self.uparrowevent()
1521 1521 if keypressed in ["k", "KEY_PPAGE"]:
1522 1522 self.uparrowshiftevent()
1523 1523 elif keypressed in ["j", "KEY_DOWN"]:
1524 1524 self.downarrowevent()
1525 1525 elif keypressed in ["j", "KEY_NPAGE"]:
1526 1526 self.downarrowshiftevent()
1527 1527 elif keypressed in ["l", "KEY_RIGHT"]:
1528 1528 self.rightarrowevent()
1529 1529 elif keypressed in ["h", "KEY_LEFT"]:
1530 1530 self.leftarrowevent()
1531 1531 elif keypressed in ["h", "KEY_SLEFT"]:
1532 1532 self.leftarrowshiftevent()
1533 1533 elif keypressed in ["q"]:
1534 1534 raise util.Abort(_('user quit'))
1535 1535 elif keypressed in ["c"]:
1536 1536 if self.confirmcommit():
1537 1537 return True
1538 1538 elif keypressed in ["r"]:
1539 1539 if self.confirmcommit(review=True):
1540 1540 return True
1541 1541 elif test and keypressed in ['X']:
1542 1542 return True
1543 1543 elif keypressed in [' '] or (test and keypressed in ["TOGGLE"]):
1544 1544 self.toggleapply()
1545 1545 elif keypressed in ['A']:
1546 1546 self.toggleall()
1547 1547 elif keypressed in ['e']:
1548 1548 self.toggleedit(test=test)
1549 1549 elif keypressed in ["f"]:
1550 1550 self.togglefolded()
1551 1551 elif keypressed in ["f"]:
1552 1552 self.togglefolded(foldparent=True)
1553 1553 elif keypressed in ["?"]:
1554 1554 self.helpwindow()
1555 1555
    def main(self, stdscr):
        """
        method to be wrapped by curses.wrapper() for selecting chunks.
        sets up colors, the status window and the chunk pad, then runs the
        keypress loop until the user confirms or quits.
        """
        signal.signal(signal.SIGWINCH, self.sigwinchhandler)
        self.stdscr = stdscr
        self.yscreensize, self.xscreensize = self.stdscr.getmaxyx()

        curses.start_color()
        curses.use_default_colors()

        # available colors: black, blue, cyan, green, magenta, white, yellow
        # init_pair(color_id, foreground_color, background_color)
        self.initcolorpair(None, None, name="normal")
        self.initcolorpair(curses.COLOR_WHITE, curses.COLOR_MAGENTA,
                           name="selected")
        self.initcolorpair(curses.COLOR_RED, None, name="deletion")
        self.initcolorpair(curses.COLOR_GREEN, None, name="addition")
        self.initcolorpair(curses.COLOR_WHITE, curses.COLOR_BLUE, name="legend")
        # newwin([height, width,] begin_y, begin_x)
        self.statuswin = curses.newwin(self.numstatuslines, 0, 0, 0)
        self.statuswin.keypad(1) # interpret arrow-key, etc. esc sequences

        # figure out how much space to allocate for the chunk-pad which is
        # used for displaying the patch

        # stupid hack to prevent getnumlinesdisplayed from failing
        self.chunkpad = curses.newpad(1, self.xscreensize)

        # add 1 so to account for last line text reaching end of line
        self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1
        self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)

        # initialize selecteitemendline (initial start-line is 0)
        self.selecteditemendline = self.getnumlinesdisplayed(
            self.currentselecteditem, recursechildren=False)

        while True:
            self.updatescreen()
            try:
                keypressed = self.statuswin.getkey()
            except curses.error:
                # no readable key: use a sentinel that matches no binding
                keypressed = "foobar"
            if self.handlekeypressed(keypressed):
                break
@@ -1,2478 +1,2478 b''
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 import collections
10 10 import cStringIO, email, os, errno, re, posixpath, copy
11 11 import tempfile, zlib, shutil
12 12 # On python2.4 you have to import these by name or they fail to
13 13 # load. This was not a problem on Python 2.7.
14 14 import email.Generator
15 15 import email.Parser
16 16
17 17 from i18n import _
18 18 from node import hex, short
19 19 import base85, mdiff, scmutil, util, diffhelpers, copies, encoding, error
20 20 import pathutil
21 21
22 22 gitre = re.compile('diff --git a/(.*) b/(.*)')
23 23 tabsplitter = re.compile(r'(\t+|[^\t]+)')
24 24
25 25 class PatchError(Exception):
26 26 pass
27 27
28 28
29 29 # public functions
30 30
31 31 def split(stream):
32 32 '''return an iterator of individual patches from a stream'''
33 33 def isheader(line, inheader):
34 34 if inheader and line[0] in (' ', '\t'):
35 35 # continuation
36 36 return True
37 37 if line[0] in (' ', '-', '+'):
38 38 # diff line - don't check for header pattern in there
39 39 return False
40 40 l = line.split(': ', 1)
41 41 return len(l) == 2 and ' ' not in l[0]
42 42
43 43 def chunk(lines):
44 44 return cStringIO.StringIO(''.join(lines))
45 45
46 46 def hgsplit(stream, cur):
47 47 inheader = True
48 48
49 49 for line in stream:
50 50 if not line.strip():
51 51 inheader = False
52 52 if not inheader and line.startswith('# HG changeset patch'):
53 53 yield chunk(cur)
54 54 cur = []
55 55 inheader = True
56 56
57 57 cur.append(line)
58 58
59 59 if cur:
60 60 yield chunk(cur)
61 61
62 62 def mboxsplit(stream, cur):
63 63 for line in stream:
64 64 if line.startswith('From '):
65 65 for c in split(chunk(cur[1:])):
66 66 yield c
67 67 cur = []
68 68
69 69 cur.append(line)
70 70
71 71 if cur:
72 72 for c in split(chunk(cur[1:])):
73 73 yield c
74 74
75 75 def mimesplit(stream, cur):
76 76 def msgfp(m):
77 77 fp = cStringIO.StringIO()
78 78 g = email.Generator.Generator(fp, mangle_from_=False)
79 79 g.flatten(m)
80 80 fp.seek(0)
81 81 return fp
82 82
83 83 for line in stream:
84 84 cur.append(line)
85 85 c = chunk(cur)
86 86
87 87 m = email.Parser.Parser().parse(c)
88 88 if not m.is_multipart():
89 89 yield msgfp(m)
90 90 else:
91 91 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
92 92 for part in m.walk():
93 93 ct = part.get_content_type()
94 94 if ct not in ok_types:
95 95 continue
96 96 yield msgfp(part)
97 97
98 98 def headersplit(stream, cur):
99 99 inheader = False
100 100
101 101 for line in stream:
102 102 if not inheader and isheader(line, inheader):
103 103 yield chunk(cur)
104 104 cur = []
105 105 inheader = True
106 106 if inheader and not isheader(line, inheader):
107 107 inheader = False
108 108
109 109 cur.append(line)
110 110
111 111 if cur:
112 112 yield chunk(cur)
113 113
114 114 def remainder(cur):
115 115 yield chunk(cur)
116 116
117 117 class fiter(object):
118 118 def __init__(self, fp):
119 119 self.fp = fp
120 120
121 121 def __iter__(self):
122 122 return self
123 123
124 124 def next(self):
125 125 l = self.fp.readline()
126 126 if not l:
127 127 raise StopIteration
128 128 return l
129 129
130 130 inheader = False
131 131 cur = []
132 132
133 133 mimeheaders = ['content-type']
134 134
135 135 if not util.safehasattr(stream, 'next'):
136 136 # http responses, for example, have readline but not next
137 137 stream = fiter(stream)
138 138
139 139 for line in stream:
140 140 cur.append(line)
141 141 if line.startswith('# HG changeset patch'):
142 142 return hgsplit(stream, cur)
143 143 elif line.startswith('From '):
144 144 return mboxsplit(stream, cur)
145 145 elif isheader(line, inheader):
146 146 inheader = True
147 147 if line.split(':', 1)[0].lower() in mimeheaders:
148 148 # let email parser handle this
149 149 return mimesplit(stream, cur)
150 150 elif line.startswith('--- ') and inheader:
151 151 # No evil headers seen by diff start, split by hand
152 152 return headersplit(stream, cur)
153 153 # Not enough info, keep reading
154 154
155 155 # if we are here, we have a very plain patch
156 156 return remainder(cur)
157 157
158 158 def extract(ui, fileobj):
159 159 '''extract patch from data read from fileobj.
160 160
161 161 patch can be a normal patch or contained in an email message.
162 162
163 163 return tuple (filename, message, user, date, branch, node, p1, p2).
164 164 Any item in the returned tuple can be None. If filename is None,
165 165 fileobj did not contain a patch. Caller must unlink filename when done.'''
166 166
167 167 # attempt to detect the start of a patch
168 168 # (this heuristic is borrowed from quilt)
169 169 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
170 170 r'retrieving revision [0-9]+(\.[0-9]+)*$|'
171 171 r'---[ \t].*?^\+\+\+[ \t]|'
172 172 r'\*\*\*[ \t].*?^---[ \t])', re.MULTILINE|re.DOTALL)
173 173
174 174 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
175 175 tmpfp = os.fdopen(fd, 'w')
176 176 try:
177 177 msg = email.Parser.Parser().parse(fileobj)
178 178
179 179 subject = msg['Subject']
180 180 user = msg['From']
181 181 if not subject and not user:
182 182 # Not an email, restore parsed headers if any
183 183 subject = '\n'.join(': '.join(h) for h in msg.items()) + '\n'
184 184
185 185 # should try to parse msg['Date']
186 186 date = None
187 187 nodeid = None
188 188 branch = None
189 189 parents = []
190 190
191 191 if subject:
192 192 if subject.startswith('[PATCH'):
193 193 pend = subject.find(']')
194 194 if pend >= 0:
195 195 subject = subject[pend + 1:].lstrip()
196 196 subject = re.sub(r'\n[ \t]+', ' ', subject)
197 197 ui.debug('Subject: %s\n' % subject)
198 198 if user:
199 199 ui.debug('From: %s\n' % user)
200 200 diffs_seen = 0
201 201 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
202 202 message = ''
203 203 for part in msg.walk():
204 204 content_type = part.get_content_type()
205 205 ui.debug('Content-Type: %s\n' % content_type)
206 206 if content_type not in ok_types:
207 207 continue
208 208 payload = part.get_payload(decode=True)
209 209 m = diffre.search(payload)
210 210 if m:
211 211 hgpatch = False
212 212 hgpatchheader = False
213 213 ignoretext = False
214 214
215 215 ui.debug('found patch at byte %d\n' % m.start(0))
216 216 diffs_seen += 1
217 217 cfp = cStringIO.StringIO()
218 218 for line in payload[:m.start(0)].splitlines():
219 219 if line.startswith('# HG changeset patch') and not hgpatch:
220 220 ui.debug('patch generated by hg export\n')
221 221 hgpatch = True
222 222 hgpatchheader = True
223 223 # drop earlier commit message content
224 224 cfp.seek(0)
225 225 cfp.truncate()
226 226 subject = None
227 227 elif hgpatchheader:
228 228 if line.startswith('# User '):
229 229 user = line[7:]
230 230 ui.debug('From: %s\n' % user)
231 231 elif line.startswith("# Date "):
232 232 date = line[7:]
233 233 elif line.startswith("# Branch "):
234 234 branch = line[9:]
235 235 elif line.startswith("# Node ID "):
236 236 nodeid = line[10:]
237 237 elif line.startswith("# Parent "):
238 238 parents.append(line[9:].lstrip())
239 239 elif not line.startswith("# "):
240 240 hgpatchheader = False
241 241 elif line == '---':
242 242 ignoretext = True
243 243 if not hgpatchheader and not ignoretext:
244 244 cfp.write(line)
245 245 cfp.write('\n')
246 246 message = cfp.getvalue()
247 247 if tmpfp:
248 248 tmpfp.write(payload)
249 249 if not payload.endswith('\n'):
250 250 tmpfp.write('\n')
251 251 elif not diffs_seen and message and content_type == 'text/plain':
252 252 message += '\n' + payload
253 253 except: # re-raises
254 254 tmpfp.close()
255 255 os.unlink(tmpname)
256 256 raise
257 257
258 258 if subject and not message.startswith(subject):
259 259 message = '%s\n%s' % (subject, message)
260 260 tmpfp.close()
261 261 if not diffs_seen:
262 262 os.unlink(tmpname)
263 263 return None, message, user, date, branch, None, None, None
264 264
265 265 if parents:
266 266 p1 = parents.pop(0)
267 267 else:
268 268 p1 = None
269 269
270 270 if parents:
271 271 p2 = parents.pop(0)
272 272 else:
273 273 p2 = None
274 274
275 275 return tmpname, message, user, date, branch, nodeid, p1, p2
276 276
277 277 class patchmeta(object):
278 278 """Patched file metadata
279 279
280 280 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
281 281 or COPY. 'path' is patched file path. 'oldpath' is set to the
282 282 origin file when 'op' is either COPY or RENAME, None otherwise. If
283 283 file mode is changed, 'mode' is a tuple (islink, isexec) where
284 284 'islink' is True if the file is a symlink and 'isexec' is True if
285 285 the file is executable. Otherwise, 'mode' is None.
286 286 """
287 287 def __init__(self, path):
288 288 self.path = path
289 289 self.oldpath = None
290 290 self.mode = None
291 291 self.op = 'MODIFY'
292 292 self.binary = False
293 293
294 294 def setmode(self, mode):
295 295 islink = mode & 020000
296 296 isexec = mode & 0100
297 297 self.mode = (islink, isexec)
298 298
299 299 def copy(self):
300 300 other = patchmeta(self.path)
301 301 other.oldpath = self.oldpath
302 302 other.mode = self.mode
303 303 other.op = self.op
304 304 other.binary = self.binary
305 305 return other
306 306
307 307 def _ispatchinga(self, afile):
308 308 if afile == '/dev/null':
309 309 return self.op == 'ADD'
310 310 return afile == 'a/' + (self.oldpath or self.path)
311 311
312 312 def _ispatchingb(self, bfile):
313 313 if bfile == '/dev/null':
314 314 return self.op == 'DELETE'
315 315 return bfile == 'b/' + self.path
316 316
317 317 def ispatching(self, afile, bfile):
318 318 return self._ispatchinga(afile) and self._ispatchingb(bfile)
319 319
320 320 def __repr__(self):
321 321 return "<patchmeta %s %r>" % (self.op, self.path)
322 322
323 323 def readgitpatch(lr):
324 324 """extract git-style metadata about patches from <patchname>"""
325 325
326 326 # Filter patch for git information
327 327 gp = None
328 328 gitpatches = []
329 329 for line in lr:
330 330 line = line.rstrip(' \r\n')
331 331 if line.startswith('diff --git a/'):
332 332 m = gitre.match(line)
333 333 if m:
334 334 if gp:
335 335 gitpatches.append(gp)
336 336 dst = m.group(2)
337 337 gp = patchmeta(dst)
338 338 elif gp:
339 339 if line.startswith('--- '):
340 340 gitpatches.append(gp)
341 341 gp = None
342 342 continue
343 343 if line.startswith('rename from '):
344 344 gp.op = 'RENAME'
345 345 gp.oldpath = line[12:]
346 346 elif line.startswith('rename to '):
347 347 gp.path = line[10:]
348 348 elif line.startswith('copy from '):
349 349 gp.op = 'COPY'
350 350 gp.oldpath = line[10:]
351 351 elif line.startswith('copy to '):
352 352 gp.path = line[8:]
353 353 elif line.startswith('deleted file'):
354 354 gp.op = 'DELETE'
355 355 elif line.startswith('new file mode '):
356 356 gp.op = 'ADD'
357 357 gp.setmode(int(line[-6:], 8))
358 358 elif line.startswith('new mode '):
359 359 gp.setmode(int(line[-6:], 8))
360 360 elif line.startswith('GIT binary patch'):
361 361 gp.binary = True
362 362 if gp:
363 363 gitpatches.append(gp)
364 364
365 365 return gitpatches
366 366
367 367 class linereader(object):
368 368 # simple class to allow pushing lines back into the input stream
369 369 def __init__(self, fp):
370 370 self.fp = fp
371 371 self.buf = []
372 372
373 373 def push(self, line):
374 374 if line is not None:
375 375 self.buf.append(line)
376 376
377 377 def readline(self):
378 378 if self.buf:
379 379 l = self.buf[0]
380 380 del self.buf[0]
381 381 return l
382 382 return self.fp.readline()
383 383
384 384 def __iter__(self):
385 385 while True:
386 386 l = self.readline()
387 387 if not l:
388 388 break
389 389 yield l
390 390
391 391 class abstractbackend(object):
392 392 def __init__(self, ui):
393 393 self.ui = ui
394 394
395 395 def getfile(self, fname):
396 396 """Return target file data and flags as a (data, (islink,
397 397 isexec)) tuple. Data is None if file is missing/deleted.
398 398 """
399 399 raise NotImplementedError
400 400
401 401 def setfile(self, fname, data, mode, copysource):
402 402 """Write data to target file fname and set its mode. mode is a
403 403 (islink, isexec) tuple. If data is None, the file content should
404 404 be left unchanged. If the file is modified after being copied,
405 405 copysource is set to the original file name.
406 406 """
407 407 raise NotImplementedError
408 408
409 409 def unlink(self, fname):
410 410 """Unlink target file."""
411 411 raise NotImplementedError
412 412
413 413 def writerej(self, fname, failed, total, lines):
414 414 """Write rejected lines for fname. total is the number of hunks
415 415 which failed to apply and total the total number of hunks for this
416 416 files.
417 417 """
418 418 pass
419 419
420 420 def exists(self, fname):
421 421 raise NotImplementedError
422 422
423 423 class fsbackend(abstractbackend):
424 424 def __init__(self, ui, basedir):
425 425 super(fsbackend, self).__init__(ui)
426 426 self.opener = scmutil.opener(basedir)
427 427
428 428 def _join(self, f):
429 429 return os.path.join(self.opener.base, f)
430 430
431 431 def getfile(self, fname):
432 432 if self.opener.islink(fname):
433 433 return (self.opener.readlink(fname), (True, False))
434 434
435 435 isexec = False
436 436 try:
437 437 isexec = self.opener.lstat(fname).st_mode & 0100 != 0
438 438 except OSError, e:
439 439 if e.errno != errno.ENOENT:
440 440 raise
441 441 try:
442 442 return (self.opener.read(fname), (False, isexec))
443 443 except IOError, e:
444 444 if e.errno != errno.ENOENT:
445 445 raise
446 446 return None, None
447 447
448 448 def setfile(self, fname, data, mode, copysource):
449 449 islink, isexec = mode
450 450 if data is None:
451 451 self.opener.setflags(fname, islink, isexec)
452 452 return
453 453 if islink:
454 454 self.opener.symlink(data, fname)
455 455 else:
456 456 self.opener.write(fname, data)
457 457 if isexec:
458 458 self.opener.setflags(fname, False, True)
459 459
460 460 def unlink(self, fname):
461 461 self.opener.unlinkpath(fname, ignoremissing=True)
462 462
463 463 def writerej(self, fname, failed, total, lines):
464 464 fname = fname + ".rej"
465 465 self.ui.warn(
466 466 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
467 467 (failed, total, fname))
468 468 fp = self.opener(fname, 'w')
469 469 fp.writelines(lines)
470 470 fp.close()
471 471
472 472 def exists(self, fname):
473 473 return self.opener.lexists(fname)
474 474
475 475 class workingbackend(fsbackend):
476 476 def __init__(self, ui, repo, similarity):
477 477 super(workingbackend, self).__init__(ui, repo.root)
478 478 self.repo = repo
479 479 self.similarity = similarity
480 480 self.removed = set()
481 481 self.changed = set()
482 482 self.copied = []
483 483
484 484 def _checkknown(self, fname):
485 485 if self.repo.dirstate[fname] == '?' and self.exists(fname):
486 486 raise PatchError(_('cannot patch %s: file is not tracked') % fname)
487 487
488 488 def setfile(self, fname, data, mode, copysource):
489 489 self._checkknown(fname)
490 490 super(workingbackend, self).setfile(fname, data, mode, copysource)
491 491 if copysource is not None:
492 492 self.copied.append((copysource, fname))
493 493 self.changed.add(fname)
494 494
495 495 def unlink(self, fname):
496 496 self._checkknown(fname)
497 497 super(workingbackend, self).unlink(fname)
498 498 self.removed.add(fname)
499 499 self.changed.add(fname)
500 500
501 501 def close(self):
502 502 wctx = self.repo[None]
503 503 changed = set(self.changed)
504 504 for src, dst in self.copied:
505 505 scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
506 506 if self.removed:
507 507 wctx.forget(sorted(self.removed))
508 508 for f in self.removed:
509 509 if f not in self.repo.dirstate:
510 510 # File was deleted and no longer belongs to the
511 511 # dirstate, it was probably marked added then
512 512 # deleted, and should not be considered by
513 513 # marktouched().
514 514 changed.discard(f)
515 515 if changed:
516 516 scmutil.marktouched(self.repo, changed, self.similarity)
517 517 return sorted(self.changed)
518 518
519 519 class filestore(object):
520 520 def __init__(self, maxsize=None):
521 521 self.opener = None
522 522 self.files = {}
523 523 self.created = 0
524 524 self.maxsize = maxsize
525 525 if self.maxsize is None:
526 526 self.maxsize = 4*(2**20)
527 527 self.size = 0
528 528 self.data = {}
529 529
530 530 def setfile(self, fname, data, mode, copied=None):
531 531 if self.maxsize < 0 or (len(data) + self.size) <= self.maxsize:
532 532 self.data[fname] = (data, mode, copied)
533 533 self.size += len(data)
534 534 else:
535 535 if self.opener is None:
536 536 root = tempfile.mkdtemp(prefix='hg-patch-')
537 537 self.opener = scmutil.opener(root)
538 538 # Avoid filename issues with these simple names
539 539 fn = str(self.created)
540 540 self.opener.write(fn, data)
541 541 self.created += 1
542 542 self.files[fname] = (fn, mode, copied)
543 543
544 544 def getfile(self, fname):
545 545 if fname in self.data:
546 546 return self.data[fname]
547 547 if not self.opener or fname not in self.files:
548 548 return None, None, None
549 549 fn, mode, copied = self.files[fname]
550 550 return self.opener.read(fn), mode, copied
551 551
552 552 def close(self):
553 553 if self.opener:
554 554 shutil.rmtree(self.opener.base)
555 555
556 556 class repobackend(abstractbackend):
557 557 def __init__(self, ui, repo, ctx, store):
558 558 super(repobackend, self).__init__(ui)
559 559 self.repo = repo
560 560 self.ctx = ctx
561 561 self.store = store
562 562 self.changed = set()
563 563 self.removed = set()
564 564 self.copied = {}
565 565
566 566 def _checkknown(self, fname):
567 567 if fname not in self.ctx:
568 568 raise PatchError(_('cannot patch %s: file is not tracked') % fname)
569 569
570 570 def getfile(self, fname):
571 571 try:
572 572 fctx = self.ctx[fname]
573 573 except error.LookupError:
574 574 return None, None
575 575 flags = fctx.flags()
576 576 return fctx.data(), ('l' in flags, 'x' in flags)
577 577
578 578 def setfile(self, fname, data, mode, copysource):
579 579 if copysource:
580 580 self._checkknown(copysource)
581 581 if data is None:
582 582 data = self.ctx[fname].data()
583 583 self.store.setfile(fname, data, mode, copysource)
584 584 self.changed.add(fname)
585 585 if copysource:
586 586 self.copied[fname] = copysource
587 587
588 588 def unlink(self, fname):
589 589 self._checkknown(fname)
590 590 self.removed.add(fname)
591 591
592 592 def exists(self, fname):
593 593 return fname in self.ctx
594 594
595 595 def close(self):
596 596 return self.changed | self.removed
597 597
598 598 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
599 599 unidesc = re.compile('@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
600 600 contextdesc = re.compile('(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
601 601 eolmodes = ['strict', 'crlf', 'lf', 'auto']
602 602
603 603 class patchfile(object):
604 604 def __init__(self, ui, gp, backend, store, eolmode='strict'):
605 605 self.fname = gp.path
606 606 self.eolmode = eolmode
607 607 self.eol = None
608 608 self.backend = backend
609 609 self.ui = ui
610 610 self.lines = []
611 611 self.exists = False
612 612 self.missing = True
613 613 self.mode = gp.mode
614 614 self.copysource = gp.oldpath
615 615 self.create = gp.op in ('ADD', 'COPY', 'RENAME')
616 616 self.remove = gp.op == 'DELETE'
617 617 if self.copysource is None:
618 618 data, mode = backend.getfile(self.fname)
619 619 else:
620 620 data, mode = store.getfile(self.copysource)[:2]
621 621 if data is not None:
622 622 self.exists = self.copysource is None or backend.exists(self.fname)
623 623 self.missing = False
624 624 if data:
625 625 self.lines = mdiff.splitnewlines(data)
626 626 if self.mode is None:
627 627 self.mode = mode
628 628 if self.lines:
629 629 # Normalize line endings
630 630 if self.lines[0].endswith('\r\n'):
631 631 self.eol = '\r\n'
632 632 elif self.lines[0].endswith('\n'):
633 633 self.eol = '\n'
634 634 if eolmode != 'strict':
635 635 nlines = []
636 636 for l in self.lines:
637 637 if l.endswith('\r\n'):
638 638 l = l[:-2] + '\n'
639 639 nlines.append(l)
640 640 self.lines = nlines
641 641 else:
642 642 if self.create:
643 643 self.missing = False
644 644 if self.mode is None:
645 645 self.mode = (False, False)
646 646 if self.missing:
647 647 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
648 648
649 649 self.hash = {}
650 650 self.dirty = 0
651 651 self.offset = 0
652 652 self.skew = 0
653 653 self.rej = []
654 654 self.fileprinted = False
655 655 self.printfile(False)
656 656 self.hunks = 0
657 657
658 658 def writelines(self, fname, lines, mode):
659 659 if self.eolmode == 'auto':
660 660 eol = self.eol
661 661 elif self.eolmode == 'crlf':
662 662 eol = '\r\n'
663 663 else:
664 664 eol = '\n'
665 665
666 666 if self.eolmode != 'strict' and eol and eol != '\n':
667 667 rawlines = []
668 668 for l in lines:
669 669 if l and l[-1] == '\n':
670 670 l = l[:-1] + eol
671 671 rawlines.append(l)
672 672 lines = rawlines
673 673
674 674 self.backend.setfile(fname, ''.join(lines), mode, self.copysource)
675 675
676 676 def printfile(self, warn):
677 677 if self.fileprinted:
678 678 return
679 679 if warn or self.ui.verbose:
680 680 self.fileprinted = True
681 681 s = _("patching file %s\n") % self.fname
682 682 if warn:
683 683 self.ui.warn(s)
684 684 else:
685 685 self.ui.note(s)
686 686
687 687
688 688 def findlines(self, l, linenum):
689 689 # looks through the hash and finds candidate lines. The
690 690 # result is a list of line numbers sorted based on distance
691 691 # from linenum
692 692
693 693 cand = self.hash.get(l, [])
694 694 if len(cand) > 1:
695 695 # resort our list of potentials forward then back.
696 696 cand.sort(key=lambda x: abs(x - linenum))
697 697 return cand
698 698
699 699 def write_rej(self):
700 700 # our rejects are a little different from patch(1). This always
701 701 # creates rejects in the same form as the original patch. A file
702 702 # header is inserted so that you can run the reject through patch again
703 703 # without having to type the filename.
704 704 if not self.rej:
705 705 return
706 706 base = os.path.basename(self.fname)
707 707 lines = ["--- %s\n+++ %s\n" % (base, base)]
708 708 for x in self.rej:
709 709 for l in x.hunk:
710 710 lines.append(l)
711 711 if l[-1] != '\n':
712 712 lines.append("\n\ No newline at end of file\n")
713 713 self.backend.writerej(self.fname, len(self.rej), self.hunks, lines)
714 714
715 715 def apply(self, h):
716 716 if not h.complete():
717 717 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
718 718 (h.number, h.desc, len(h.a), h.lena, len(h.b),
719 719 h.lenb))
720 720
721 721 self.hunks += 1
722 722
723 723 if self.missing:
724 724 self.rej.append(h)
725 725 return -1
726 726
727 727 if self.exists and self.create:
728 728 if self.copysource:
729 729 self.ui.warn(_("cannot create %s: destination already "
730 730 "exists\n") % self.fname)
731 731 else:
732 732 self.ui.warn(_("file %s already exists\n") % self.fname)
733 733 self.rej.append(h)
734 734 return -1
735 735
736 736 if isinstance(h, binhunk):
737 737 if self.remove:
738 738 self.backend.unlink(self.fname)
739 739 else:
740 740 l = h.new(self.lines)
741 741 self.lines[:] = l
742 742 self.offset += len(l)
743 743 self.dirty = True
744 744 return 0
745 745
746 746 horig = h
747 747 if (self.eolmode in ('crlf', 'lf')
748 748 or self.eolmode == 'auto' and self.eol):
749 749 # If new eols are going to be normalized, then normalize
750 750 # hunk data before patching. Otherwise, preserve input
751 751 # line-endings.
752 752 h = h.getnormalized()
753 753
754 754 # fast case first, no offsets, no fuzz
755 755 old, oldstart, new, newstart = h.fuzzit(0, False)
756 756 oldstart += self.offset
757 757 orig_start = oldstart
758 758 # if there's skew we want to emit the "(offset %d lines)" even
759 759 # when the hunk cleanly applies at start + skew, so skip the
760 760 # fast case code
761 761 if (self.skew == 0 and
762 762 diffhelpers.testhunk(old, self.lines, oldstart) == 0):
763 763 if self.remove:
764 764 self.backend.unlink(self.fname)
765 765 else:
766 766 self.lines[oldstart:oldstart + len(old)] = new
767 767 self.offset += len(new) - len(old)
768 768 self.dirty = True
769 769 return 0
770 770
771 771 # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
772 772 self.hash = {}
773 773 for x, s in enumerate(self.lines):
774 774 self.hash.setdefault(s, []).append(x)
775 775
776 776 for fuzzlen in xrange(3):
777 777 for toponly in [True, False]:
778 778 old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
779 779 oldstart = oldstart + self.offset + self.skew
780 780 oldstart = min(oldstart, len(self.lines))
781 781 if old:
782 782 cand = self.findlines(old[0][1:], oldstart)
783 783 else:
784 784 # Only adding lines with no or fuzzed context, just
785 785 # take the skew in account
786 786 cand = [oldstart]
787 787
788 788 for l in cand:
789 789 if not old or diffhelpers.testhunk(old, self.lines, l) == 0:
790 790 self.lines[l : l + len(old)] = new
791 791 self.offset += len(new) - len(old)
792 792 self.skew = l - orig_start
793 793 self.dirty = True
794 794 offset = l - orig_start - fuzzlen
795 795 if fuzzlen:
796 796 msg = _("Hunk #%d succeeded at %d "
797 797 "with fuzz %d "
798 798 "(offset %d lines).\n")
799 799 self.printfile(True)
800 800 self.ui.warn(msg %
801 801 (h.number, l + 1, fuzzlen, offset))
802 802 else:
803 803 msg = _("Hunk #%d succeeded at %d "
804 804 "(offset %d lines).\n")
805 805 self.ui.note(msg % (h.number, l + 1, offset))
806 806 return fuzzlen
807 807 self.printfile(True)
808 808 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
809 809 self.rej.append(horig)
810 810 return -1
811 811
812 812 def close(self):
813 813 if self.dirty:
814 814 self.writelines(self.fname, self.lines, self.mode)
815 815 self.write_rej()
816 816 return len(self.rej)
817 817
818 818 class header(object):
819 819 """patch header
820 820 """
821 821 diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
822 822 diff_re = re.compile('diff -r .* (.*)$')
823 823 allhunks_re = re.compile('(?:index|deleted file) ')
824 824 pretty_re = re.compile('(?:new file|deleted file) ')
825 825 special_re = re.compile('(?:index|deleted|copy|rename) ')
826 826 newfile_re = re.compile('(?:new file)')
827 827
828 828 def __init__(self, header):
829 829 self.header = header
830 830 self.hunks = []
831 831
832 832 def binary(self):
833 833 return any(h.startswith('index ') for h in self.header)
834 834
835 835 def pretty(self, fp):
836 836 for h in self.header:
837 837 if h.startswith('index '):
838 838 fp.write(_('this modifies a binary file (all or nothing)\n'))
839 839 break
840 840 if self.pretty_re.match(h):
841 841 fp.write(h)
842 842 if self.binary():
843 843 fp.write(_('this is a binary file\n'))
844 844 break
845 845 if h.startswith('---'):
846 846 fp.write(_('%d hunks, %d lines changed\n') %
847 847 (len(self.hunks),
848 848 sum([max(h.added, h.removed) for h in self.hunks])))
849 849 break
850 850 fp.write(h)
851 851
852 852 def write(self, fp):
853 853 fp.write(''.join(self.header))
854 854
855 855 def allhunks(self):
856 856 return any(self.allhunks_re.match(h) for h in self.header)
857 857
858 858 def files(self):
859 859 match = self.diffgit_re.match(self.header[0])
860 860 if match:
861 861 fromfile, tofile = match.groups()
862 862 if fromfile == tofile:
863 863 return [fromfile]
864 864 return [fromfile, tofile]
865 865 else:
866 866 return self.diff_re.match(self.header[0]).groups()
867 867
868 868 def filename(self):
869 869 return self.files()[-1]
870 870
871 871 def __repr__(self):
872 872 return '<header %s>' % (' '.join(map(repr, self.files())))
873 873
874 874 def isnewfile(self):
875 875 return any(self.newfile_re.match(h) for h in self.header)
876 876
877 877 def special(self):
878 878 # Special files are shown only at the header level and not at the hunk
879 879 # level for example a file that has been deleted is a special file.
880 880 # The user cannot change the content of the operation, in the case of
881 881 # the deleted file he has to take the deletion or not take it, he
882 882 # cannot take some of it.
883 883 # Newly added files are special if they are empty, they are not special
884 884 # if they have some content as we want to be able to change it
885 885 nocontent = len(self.header) == 2
886 886 emptynewfile = self.isnewfile() and nocontent
887 887 return emptynewfile or \
888 888 any(self.special_re.match(h) for h in self.header)
889 889
890 890 class recordhunk(object):
891 891 """patch hunk
892 892
893 893 XXX shouldn't we merge this with the other hunk class?
894 894 """
895 895 maxcontext = 3
896 896
897 897 def __init__(self, header, fromline, toline, proc, before, hunk, after):
898 898 def trimcontext(number, lines):
899 899 delta = len(lines) - self.maxcontext
900 900 if False and delta > 0:
901 901 return number + delta, lines[:self.maxcontext]
902 902 return number, lines
903 903
904 904 self.header = header
905 905 self.fromline, self.before = trimcontext(fromline, before)
906 906 self.toline, self.after = trimcontext(toline, after)
907 907 self.proc = proc
908 908 self.hunk = hunk
909 909 self.added, self.removed = self.countchanges(self.hunk)
910 910
911 911 def __eq__(self, v):
912 912 if not isinstance(v, recordhunk):
913 913 return False
914 914
915 915 return ((v.hunk == self.hunk) and
916 916 (v.proc == self.proc) and
917 917 (self.fromline == v.fromline) and
918 918 (self.header.files() == v.header.files()))
919 919
920 920 def __hash__(self):
921 921 return hash((tuple(self.hunk),
922 922 tuple(self.header.files()),
923 923 self.fromline,
924 924 self.proc))
925 925
926 926 def countchanges(self, hunk):
927 927 """hunk -> (n+,n-)"""
928 928 add = len([h for h in hunk if h[0] == '+'])
929 929 rem = len([h for h in hunk if h[0] == '-'])
930 930 return add, rem
931 931
932 932 def write(self, fp):
933 933 delta = len(self.before) + len(self.after)
934 934 if self.after and self.after[-1] == '\\ No newline at end of file\n':
935 935 delta -= 1
936 936 fromlen = delta + self.removed
937 937 tolen = delta + self.added
938 938 fp.write('@@ -%d,%d +%d,%d @@%s\n' %
939 939 (self.fromline, fromlen, self.toline, tolen,
940 940 self.proc and (' ' + self.proc)))
941 941 fp.write(''.join(self.before + self.hunk + self.after))
942 942
943 943 pretty = write
944 944
945 945 def filename(self):
946 946 return self.header.filename()
947 947
948 948 def __repr__(self):
949 949 return '<hunk %r@%d>' % (self.filename(), self.fromline)
950 950
def filterpatch(ui, headers, operation=None):
    """Interactively filter patch chunks into applied-only chunks

    headers is a list of parsed patch headers (one per file); the user is
    prompted per file and per hunk.  Returns the flattened list of headers
    and hunks that were accepted.

    NOTE(review): 'operation' is accepted but not consulted in this body —
    presumably reserved so callers can customize the prompt wording
    (e.g. 'record' vs 'revert'); confirm against callers.
    """

    def prompt(skipfile, skipall, query, chunk):
        """prompt query, and process base inputs

        - y/n for the rest of file
        - y/n for the rest
        - ? (help)
        - q (quit)

        Return True/False and possibly updated skipfile and skipall.
        """
        newpatches = None
        # a previous 'all'/'done' or per-file answer short-circuits the prompt
        if skipall is not None:
            return skipall, skipfile, skipall, newpatches
        if skipfile is not None:
            return skipfile, skipfile, skipall, newpatches
        while True:
            resps = _('[Ynesfdaq?]'
                      '$$ &Yes, record this change'
                      '$$ &No, skip this change'
                      '$$ &Edit this change manually'
                      '$$ &Skip remaining changes to this file'
                      '$$ Record remaining changes to this &file'
                      '$$ &Done, skip remaining changes and files'
                      '$$ Record &all changes to all remaining files'
                      '$$ &Quit, recording no changes'
                      '$$ &? (display help)')
            r = ui.promptchoice("%s %s" % (query, resps))
            ui.write("\n")
            if r == 8: # ?
                for c, t in ui.extractchoices(resps)[1]:
                    ui.write('%s - %s\n' % (c, t.lower()))
                continue
            elif r == 0: # yes
                ret = True
            elif r == 1: # no
                ret = False
            elif r == 2: # Edit patch
                # editing only makes sense for a single textual hunk
                if chunk is None:
                    ui.write(_('cannot edit patch for whole file'))
                    ui.write("\n")
                    continue
                if chunk.header.binary():
                    ui.write(_('cannot edit patch for binary file'))
                    ui.write("\n")
                    continue
                # Patch comment based on the Git one (based on comment at end of
                # http://mercurial.selenic.com/wiki/RecordExtension)
                phelp = '---' + _("""
To remove '-' lines, make them ' ' lines (context).
To remove '+' lines, delete them.
Lines starting with # will be removed from the patch.

If the patch applies cleanly, the edited hunk will immediately be
added to the record list. If it does not apply cleanly, a rejects
file will be generated: you can use that when you try again. If
all lines of the hunk are removed, then the edit is aborted and
the hunk is left unchanged.
""")
                (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-",
                        suffix=".diff", text=True)
                ncpatchfp = None
                try:
                    # Write the initial patch
                    f = os.fdopen(patchfd, "w")
                    chunk.header.write(f)
                    chunk.write(f)
                    f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
                    f.close()
                    # Start the editor and wait for it to complete
                    editor = ui.geteditor()
                    ui.system("%s \"%s\"" % (editor, patchfn),
                              environ={'HGUSER': ui.username()},
                              onerr=util.Abort, errprefix=_("edit failed"))
                    # Remove comment lines
                    patchfp = open(patchfn)
                    ncpatchfp = cStringIO.StringIO()
                    for line in patchfp:
                        if not line.startswith('#'):
                            ncpatchfp.write(line)
                    patchfp.close()
                    ncpatchfp.seek(0)
                    newpatches = parsepatch(ncpatchfp)
                finally:
                    os.unlink(patchfn)
                    del ncpatchfp
                # Signal that the chunk shouldn't be applied as-is, but
                # provide the new patch to be used instead.
                ret = False
            elif r == 3: # Skip
                ret = skipfile = False
            elif r == 4: # file (Record remaining)
                ret = skipfile = True
            elif r == 5: # done, skip remaining
                ret = skipall = False
            elif r == 6: # all
                ret = skipall = True
            elif r == 7: # quit
                raise util.Abort(_('user quit'))
            return ret, skipfile, skipall, newpatches

    seen = set()
    applied = {}        # 'filename' -> [] of chunks
    skipfile, skipall = None, None
    pos, total = 1, sum(len(h.hunks) for h in headers)
    for h in headers:
        pos += len(h.hunks)
        skipfile = None
        # fixoffset tracks the cumulative line drift caused by hunks the
        # user rejected, so later accepted hunks still apply cleanly
        fixoffset = 0
        hdr = ''.join(h.header)
        if hdr in seen:
            continue
        seen.add(hdr)
        if skipall is None:
            h.pretty(ui)
            msg = (_('examine changes to %s?') %
                   _(' and ').join("'%s'" % f for f in h.files()))
            r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
            if not r:
                continue
        applied[h.filename()] = [h]
        if h.allhunks():
            # e.g. binary files or file mode changes: all-or-nothing
            applied[h.filename()] += h.hunks
            continue
        for i, chunk in enumerate(h.hunks):
            if skipfile is None and skipall is None:
                chunk.pretty(ui)
            if total == 1:
                msg = _("record this change to '%s'?") % chunk.filename()
            else:
                idx = pos - len(h.hunks) + i
                msg = _("record change %d/%d to '%s'?") % (idx, total,
                                                           chunk.filename())
            r, skipfile, skipall, newpatches = prompt(skipfile,
                 skipall, msg, chunk)
            if r:
                if fixoffset:
                    # copy before mutating: the original chunk may be shared
                    chunk = copy.copy(chunk)
                    chunk.toline += fixoffset
                applied[chunk.filename()].append(chunk)
            elif newpatches is not None:
                # the user edited the hunk: record the edited version(s)
                for newpatch in newpatches:
                    for newhunk in newpatch.hunks:
                        if fixoffset:
                            newhunk.toline += fixoffset
                        applied[newhunk.filename()].append(newhunk)
            else:
                fixoffset += chunk.removed - chunk.added
    # keep only files where something beyond the bare header was accepted
    return sum([h for h in applied.itervalues()
                if h[0].special() or len(h) > 1], [])
class hunk(object):
    """A single textual patch hunk, parsed from unified or context format.

    self.a / self.b hold the old-side and new-side lines; self.hunk holds
    the raw hunk text (descriptor line first).
    """
    def __init__(self, desc, num, lr, context):
        # desc: the '@@ ...' (or context '***') descriptor line
        # num: 1-based hunk number, used in error messages
        # lr: linereader to pull the hunk body from; None builds an
        #     empty shell (see getnormalized)
        # context: True for context-diff format, falsy for unified
        self.number = num
        self.desc = desc
        self.hunk = [desc]
        self.a = []
        self.b = []
        self.starta = self.lena = None
        self.startb = self.lenb = None
        if lr is not None:
            if context:
                self.read_context_hunk(lr)
            else:
                self.read_unified_hunk(lr)

    def getnormalized(self):
        """Return a copy with line endings normalized to LF."""

        def normalize(lines):
            nlines = []
            for line in lines:
                if line.endswith('\r\n'):
                    line = line[:-2] + '\n'
                nlines.append(line)
            return nlines

        # Dummy object, it is rebuilt manually
        nh = hunk(self.desc, self.number, None, None)
        nh.number = self.number
        nh.desc = self.desc
        nh.hunk = self.hunk
        nh.a = normalize(self.a)
        nh.b = normalize(self.b)
        nh.starta = self.starta
        nh.startb = self.startb
        nh.lena = self.lena
        nh.lenb = self.lenb
        return nh

    def read_unified_hunk(self, lr):
        """Parse a unified-format hunk body following the '@@' descriptor."""
        m = unidesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, self.lena, self.startb, self.lenb = m.groups()
        # lengths are optional in '@@ -l +l @@' form and default to 1
        if self.lena is None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb is None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a,
                             self.b)
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length. Lets try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1
        self._fixnewline(lr)

    def read_context_hunk(self, lr):
        """Parse a context-format hunk and rewrite it as a unified hunk."""
        self.desc = lr.readline()
        m = contextdesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, aend = m.groups()
        self.starta = int(self.starta)
        if aend is None:
            aend = self.starta
        self.lena = int(aend) - self.starta
        if self.starta:
            self.lena += 1
        # --- old side ---
        for x in xrange(self.lena):
            l = lr.readline()
            if l.startswith('---'):
                # lines addition, old block is empty
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('- ') or l.startswith('! '):
                u = '-' + s
            elif l.startswith('  '):
                u = ' ' + s
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.a.append(u)
            self.hunk.append(u)

        l = lr.readline()
        if l.startswith('\ '):
            # no-eol marker: drop the trailing newline of the last old line
            s = self.a[-1][:-1]
            self.a[-1] = s
            self.hunk[-1] = s
            l = lr.readline()
        m = contextdesc.match(l)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.startb, bend = m.groups()
        self.startb = int(self.startb)
        if bend is None:
            bend = self.startb
        self.lenb = int(bend) - self.startb
        if self.startb:
            self.lenb += 1
        # --- new side: merge into self.hunk at the right positions ---
        hunki = 1
        for x in xrange(self.lenb):
            l = lr.readline()
            if l.startswith('\ '):
                # XXX: the only way to hit this is with an invalid line range.
                # The no-eol marker is not counted in the line range, but I
                # guess there are diff(1) out there which behave differently.
                s = self.b[-1][:-1]
                self.b[-1] = s
                self.hunk[hunki - 1] = s
                continue
            if not l:
                # line deletions, new block is empty and we hit EOF
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('+ ') or l.startswith('! '):
                u = '+' + s
            elif l.startswith('  '):
                u = ' ' + s
            elif len(self.b) == 0:
                # line deletions, new block is empty
                lr.push(l)
                break
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.b.append(s)
            # advance through self.hunk to find where this new-side line
            # belongs, inserting it if the old side did not contain it
            while True:
                if hunki >= len(self.hunk):
                    h = ""
                else:
                    h = self.hunk[hunki]
                hunki += 1
                if h == u:
                    break
                elif h.startswith('-'):
                    continue
                else:
                    self.hunk.insert(hunki - 1, u)
                    break

        if not self.a:
            # this happens when lines were only added to the hunk
            for x in self.hunk:
                if x.startswith('-') or x.startswith(' '):
                    self.a.append(x)
        if not self.b:
            # this happens when lines were only deleted from the hunk
            for x in self.hunk:
                if x.startswith('+') or x.startswith(' '):
                    self.b.append(x[1:])
        # @@ -start,len +start,len @@
        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
                                               self.startb, self.lenb)
        self.hunk[0] = self.desc
        self._fixnewline(lr)

    def _fixnewline(self, lr):
        # consume a trailing '\ No newline ...' marker, if present
        l = lr.readline()
        if l.startswith('\ '):
            diffhelpers.fix_newline(self.hunk, self.a, self.b)
        else:
            lr.push(l)

    def complete(self):
        """True when both sides contain as many lines as the ranges claim."""
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def _fuzzit(self, old, new, fuzz, toponly):
        # this removes context lines from the top and bottom of list 'l'. It
        # checks the hunk to make sure only context lines are removed, and then
        # returns a new shortened list of lines.
        fuzz = min(fuzz, len(old))
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            for x in xrange(hlen - 1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x + 1][0] == ' ':
                    top += 1
                else:
                    break
            if not toponly:
                for x in xrange(hlen - 1):
                    if self.hunk[hlen - bot - 1][0] == ' ':
                        bot += 1
                    else:
                        break

            bot = min(fuzz, bot)
            top = min(fuzz, top)
            return old[top:len(old) - bot], new[top:len(new) - bot], top
        return old, new, 0

    def fuzzit(self, fuzz, toponly):
        """Return (old, oldstart, new, newstart) with up to *fuzz* context
        lines trimmed, and start offsets converted to 0-based positions."""
        old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
        oldstart = self.starta + top
        newstart = self.startb + top
        # zero length hunk ranges already have their start decremented
        if self.lena and oldstart > 0:
            oldstart -= 1
        if self.lenb and newstart > 0:
            newstart -= 1
        return old, oldstart, new, newstart
1319 1319
class binhunk(object):
    'A binary patch file.'
    def __init__(self, lr, fname):
        # text: decoded (and zlib-decompressed) payload, set by _read
        # delta: True when the payload is a git binary delta, not a literal
        self.text = None
        self.delta = False
        self.hunk = ['GIT binary patch\n']
        self._fname = fname
        self._read(lr)

    def complete(self):
        """True once the binary payload was fully decoded."""
        return self.text is not None

    def new(self, lines):
        """Return the new file content; *lines* is the old content used as
        the delta base when this is a delta hunk."""
        if self.delta:
            return [applybindelta(self.text, ''.join(lines))]
        return [self.text]

    def _read(self, lr):
        """Decode the base85/zlib 'GIT binary patch' body from *lr*."""
        def getline(lr, hunk):
            l = lr.readline()
            hunk.append(l)
            return l.rstrip('\r\n')

        size = 0
        # find the 'literal <size>' or 'delta <size>' header line
        while True:
            line = getline(lr, self.hunk)
            if not line:
                raise PatchError(_('could not extract "%s" binary data')
                                 % self._fname)
            if line.startswith('literal '):
                size = int(line[8:].rstrip())
                break
            if line.startswith('delta '):
                size = int(line[6:].rstrip())
                self.delta = True
                break
        dec = []
        line = getline(lr, self.hunk)
        while len(line) > 1:
            # first character encodes the decoded length of this line:
            # 'A'-'Z' -> 1..26, 'a'-'z' -> 27..52 (git base85 framing)
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            try:
                dec.append(base85.b85decode(line[1:])[:l])
            except ValueError, e:
                raise PatchError(_('could not decode "%s" binary patch: %s')
                                 % (self._fname, str(e)))
            line = getline(lr, self.hunk)
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            raise PatchError(_('"%s" length is %d bytes, should be %d')
                             % (self._fname, len(text), size))
        self.text = text
1375 1375
def parsefilename(str):
    """Extract the filename from a '--- filename\\t...' / '+++ ...' line.

    The four-character marker is stripped, then the name runs up to the
    first tab (or, failing that, the first space); with neither present
    the whole remainder is the name.
    """
    # --- filename \t|space stuff
    name = str[4:].rstrip('\r\n')
    for sep in ('\t', ' '):
        cut = name.find(sep)
        if cut >= 0:
            return name[:cut]
    return name
1385 1385
def parsepatch(originalchunks):
    """patch -> [] of headers -> [] of hunks

    Feeds the events produced by scanpatch() through a small state machine
    and returns the resulting list of header objects, each carrying its
    recordhunk instances.
    """
    class parser(object):
        """patch parsing state machine"""
        def __init__(self):
            # fromline/toline: current line cursors in old/new file
            self.fromline = 0
            self.toline = 0
            self.proc = ''
            self.header = None
            self.context = []
            self.before = []
            self.hunk = []
            self.headers = []

        def addrange(self, limits):
            # '@@' range line: reset the line cursors
            fromstart, fromend, tostart, toend, proc = limits
            self.fromline = int(fromstart)
            self.toline = int(tostart)
            self.proc = proc

        def addcontext(self, context):
            # context lines terminate a pending hunk, if any
            if self.hunk:
                h = recordhunk(self.header, self.fromline, self.toline,
                               self.proc, self.before, self.hunk, context)
                self.header.hunks.append(h)
                self.fromline += len(self.before) + h.removed
                self.toline += len(self.before) + h.added
                self.before = []
                self.hunk = []
                self.proc = ''
            self.context = context

        def addhunk(self, hunk):
            # context seen so far becomes the leading context of this hunk
            if self.context:
                self.before = self.context
                self.context = []
            self.hunk = hunk

        def newfile(self, hdr):
            # flush any pending hunk, then start a new file header
            self.addcontext([])
            h = header(hdr)
            self.headers.append(h)
            self.header = h

        def addother(self, line):
            pass # 'other' lines are ignored

        def finished(self):
            self.addcontext([])
            return self.headers

        # legal (state, event) -> action transitions; anything else is an
        # error (see the KeyError handler below)
        transitions = {
            'file': {'context': addcontext,
                     'file': newfile,
                     'hunk': addhunk,
                     'range': addrange},
            'context': {'file': newfile,
                        'hunk': addhunk,
                        'range': addrange,
                        'other': addother},
            'hunk': {'context': addcontext,
                     'file': newfile,
                     'range': addrange},
            'range': {'context': addcontext,
                      'hunk': addhunk},
            'other': {'other': addother},
            }

    p = parser()
    fp = cStringIO.StringIO()
    fp.write(''.join(originalchunks))
    fp.seek(0)

    state = 'context'
    for newstate, data in scanpatch(fp):
        try:
            p.transitions[state][newstate](p, data)
        except KeyError:
            raise PatchError('unhandled transition: %s -> %s' %
                             (state, newstate))
        state = newstate
    del fp
    return p.finished()
1469 1469
def pathtransform(path, strip, prefix):
    '''turn a path from a patch into a path suitable for the repository

    prefix, if not empty, is expected to be normalized with a / at the end.

    Returns (stripped components, path in repository).

    >>> pathtransform('a/b/c', 0, '')
    ('', 'a/b/c')
    >>> pathtransform('   a/b/c   ', 0, '')
    ('', '   a/b/c')
    >>> pathtransform('   a/b/c   ', 2, '')
    ('a/b/', 'c')
    >>> pathtransform('a/b/c', 0, 'd/e/')
    ('', 'd/e/a/b/c')
    >>> pathtransform('   a//b/c   ', 2, 'd/e/')
    ('a//b/', 'd/e/c')
    >>> pathtransform('a/b/c', 3, '')
    Traceback (most recent call last):
    PatchError: unable to strip away 1 of 3 dirs from a/b/c
    '''
    if strip == 0:
        return '', prefix + path.rstrip()
    total = len(path)
    pos = 0
    remaining = strip
    while remaining:
        # locate the next path separator; running out of separators
        # means the patch claims more leading dirs than the path has
        pos = path.find('/', pos)
        if pos == -1:
            raise PatchError(_("unable to strip away %d of %d dirs from %s") %
                             (remaining, strip, path))
        pos += 1
        # consume '//' in the path
        while pos < total - 1 and path[pos] == '/':
            pos += 1
        remaining -= 1
    return path[:pos].lstrip(), prefix + path[pos:].rstrip()
1507 1507
def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix):
    """Build a patchmeta for a plain (non-git) patch hunk.

    Decides which on-disk file the hunk targets by probing *backend* for
    the stripped old/new names, and classifies the operation as ADD,
    DELETE or a plain modification.
    """
    nulla = afile_orig == "/dev/null"
    nullb = bfile_orig == "/dev/null"
    # a hunk with a null source and a 0,0 old range creates the file;
    # the symmetric case deletes it
    create = nulla and hunk.starta == 0 and hunk.lena == 0
    remove = nullb and hunk.startb == 0 and hunk.lenb == 0
    abase, afile = pathtransform(afile_orig, strip, prefix)
    gooda = not nulla and backend.exists(afile)
    bbase, bfile = pathtransform(bfile_orig, strip, prefix)
    if afile == bfile:
        goodb = gooda
    else:
        goodb = not nullb and backend.exists(bfile)
    missing = not goodb and not gooda and not create

    # some diff programs apparently produce patches where the afile is
    # not /dev/null, but afile starts with bfile
    abasedir = afile[:afile.rfind('/') + 1]
    bbasedir = bfile[:bfile.rfind('/') + 1]
    if (missing and abasedir == bbasedir and afile.startswith(bfile)
        and hunk.starta == 0 and hunk.lena == 0):
        create = True
        missing = False

    # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
    # diff is between a file and its backup. In this case, the original
    # file should be patched (see original mpatch code).
    isbackup = (abase == bbase and bfile.startswith(afile))
    fname = None
    if not missing:
        if gooda and goodb:
            if isbackup:
                fname = afile
            else:
                fname = bfile
        elif gooda:
            fname = afile

    if not fname:
        # fall back to whichever side is not null
        if not nullb:
            if isbackup:
                fname = afile
            else:
                fname = bfile
        elif not nulla:
            fname = afile
        else:
            raise PatchError(_("undefined source and destination files"))

    gp = patchmeta(fname)
    if create:
        gp.op = 'ADD'
    elif remove:
        gp.op = 'DELETE'
    return gp
1562 1562
def scanpatch(fp):
    """like patch.iterhunks, but yield different events

    - ('file', [header_lines + fromfile + tofile])
    - ('context', [context_lines])
    - ('hunk', [hunk_lines])
    - ('range', (-start,len, +start,len, proc))
    """
    lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
    lr = linereader(fp)

    def scanwhile(first, p):
        """scan lr while predicate holds"""
        lines = [first]
        while True:
            line = lr.readline()
            if not line:
                break
            if p(line):
                lines.append(line)
            else:
                # first non-matching line is pushed back for the caller
                lr.push(line)
                break
        return lines

    while True:
        line = lr.readline()
        if not line:
            break
        if line.startswith('diff --git a/') or line.startswith('diff -r '):
            def notheader(line):
                s = line.split(None, 1)
                return not s or s[0] not in ('---', 'diff')
            header = scanwhile(line, notheader)
            fromfile = lr.readline()
            if fromfile.startswith('---'):
                tofile = lr.readline()
                header += [fromfile, tofile]
            else:
                lr.push(fromfile)
            yield 'file', header
        elif line[0] == ' ':
            # leading space: context; '\' covers the no-eol marker
            yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
        elif line[0] in '-+':
            yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
        else:
            m = lines_re.match(line)
            if m:
                yield 'range', m.groups()
            else:
                yield 'other', line
1614 1614
def scangitpatch(lr, firstline):
    """
    Git patches can emit:
    - rename a to b
    - change b
    - copy a to c
    - change c

    We cannot apply this sequence as-is, the renamed 'a' could not be
    found for it would have been renamed already. And we cannot copy
    from 'b' instead because 'b' would have been changed already. So
    we scan the git patch for copy and rename commands so we can
    perform the copies ahead of time.
    """
    pos = 0
    try:
        pos = lr.fp.tell()
        fp = lr.fp
    except IOError:
        # unseekable stream (e.g. a pipe): buffer the remainder in memory
        # so we can rewind after the metadata scan
        fp = cStringIO.StringIO(lr.fp.read())
    gitlr = linereader(fp)
    gitlr.push(firstline)
    gitpatches = readgitpatch(gitlr)
    # rewind so the caller re-reads the patch body from the start
    fp.seek(pos)
    return gitpatches
1640 1640
def iterhunks(fp):
    """Read a patch and yield the following events:
    - ("file", afile, bfile, firsthunk): select a new target file.
    - ("hunk", hunk): a new hunk is ready to be applied, follows a
    "file" event.
    - ("git", gitchanges): current diff is in git format, gitchanges
    maps filenames to gitpatch records. Unique event.
    """
    afile = ""
    bfile = ""
    state = None
    hunknum = 0
    # emitfile: a 'file' event is pending until its first hunk is seen
    emitfile = newfile = False
    gitpatches = None

    # our states
    BFILE = 1
    # context: None = unknown format yet, True = context diff, False = unified
    context = None
    lr = linereader(fp)

    while True:
        x = lr.readline()
        if not x:
            break
        if state == BFILE and (
            (not context and x[0] == '@')
            or (context is not False and x.startswith('***************'))
            or x.startswith('GIT binary patch')):
            # start of a hunk for the current file
            gp = None
            if (gitpatches and
                gitpatches[-1].ispatching(afile, bfile)):
                gp = gitpatches.pop()
            if x.startswith('GIT binary patch'):
                h = binhunk(lr, gp.path)
            else:
                if context is None and x.startswith('***************'):
                    context = True
                h = hunk(x, hunknum + 1, lr, context)
            hunknum += 1
            if emitfile:
                emitfile = False
                yield 'file', (afile, bfile, h, gp and gp.copy() or None)
            yield 'hunk', h
        elif x.startswith('diff --git a/'):
            m = gitre.match(x.rstrip(' \r\n'))
            if not m:
                continue
            if gitpatches is None:
                # scan whole input for git metadata
                gitpatches = scangitpatch(lr, x)
                yield 'git', [g.copy() for g in gitpatches
                              if g.op in ('COPY', 'RENAME')]
                gitpatches.reverse()
            afile = 'a/' + m.group(1)
            bfile = 'b/' + m.group(2)
            # flush hunk-less metadata entries that precede this file
            while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
                gp = gitpatches.pop()
                yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
            if not gitpatches:
                raise PatchError(_('failed to synchronize metadata for "%s"')
                                 % afile[2:])
            gp = gitpatches[-1]
            newfile = True
        elif x.startswith('---'):
            # check for a unified diff
            l2 = lr.readline()
            if not l2.startswith('+++'):
                lr.push(l2)
                continue
            newfile = True
            context = False
            afile = parsefilename(x)
            bfile = parsefilename(l2)
        elif x.startswith('***'):
            # check for a context diff
            l2 = lr.readline()
            if not l2.startswith('---'):
                lr.push(l2)
                continue
            l3 = lr.readline()
            lr.push(l3)
            if not l3.startswith("***************"):
                lr.push(l2)
                continue
            newfile = True
            context = True
            afile = parsefilename(x)
            bfile = parsefilename(l2)

        if newfile:
            newfile = False
            emitfile = True
            state = BFILE
            hunknum = 0

    # emit any remaining hunk-less git metadata entries
    while gitpatches:
        gp = gitpatches.pop()
        yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
1739 1739
def applybindelta(binchunk, data):
    """Apply a binary delta hunk
    The algorithm used is the algorithm from git's patch-delta.c
    """
    def _sizeheaderlen(buf):
        # length in bytes of a little-endian base-128 size header
        # (terminated by the first byte without the 0x80 continuation bit)
        n = 0
        for ch in buf:
            n += 1
            if not (ord(ch) & 0x80):
                break
        return n

    # skip the source-size and target-size headers
    binchunk = binchunk[_sizeheaderlen(binchunk):]
    binchunk = binchunk[_sizeheaderlen(binchunk):]
    pieces = []
    i = 0
    end = len(binchunk)
    while i < end:
        cmd = ord(binchunk[i])
        i += 1
        if cmd & 0x80:
            # copy-from-source opcode: low bits select which offset/size
            # bytes follow, little-endian
            offset = 0
            size = 0
            for bit, shift in ((0x01, 0), (0x02, 8), (0x04, 16), (0x08, 24)):
                if cmd & bit:
                    offset |= ord(binchunk[i]) << shift
                    i += 1
            for bit, shift in ((0x10, 0), (0x20, 8), (0x40, 16)):
                if cmd & bit:
                    size |= ord(binchunk[i]) << shift
                    i += 1
            if size == 0:
                # git encodes a 64kB copy as size 0
                size = 0x10000
            pieces.append(data[offset:offset + size])
        elif cmd:
            # literal insert: the next *cmd* bytes come from the delta itself
            pieces.append(binchunk[i:i + cmd])
            i += cmd
        else:
            raise PatchError(_('unexpected delta opcode 0'))
    return ''.join(pieces)
1795 1795
def applydiff(ui, fp, backend, store, strip=1, prefix='', eolmode='strict'):
    """Reads a patch from fp and tries to apply it.

    Returns 0 for a clean patch, -1 if any rejects were found and 1 if
    there was any fuzz.

    If 'eolmode' is 'strict', the patch content and patched file are
    read in binary mode. Otherwise, line endings are ignored when
    patching then normalized according to 'eolmode'.
    """
    # thin wrapper: fixes the per-file patcher to the default 'patchfile'
    return _applydiff(ui, fp, patchfile, backend, store, strip=strip,
                      prefix=prefix, eolmode=eolmode)
1808 1808
def _applydiff(ui, fp, patcher, backend, store, strip=1, prefix='',
               eolmode='strict'):
    """Drive iterhunks() events and apply them through *patcher*/*backend*.

    Returns 0 on clean apply, 1 if any hunk applied with fuzz, -1 if any
    hunk was rejected.
    """
    if prefix:
        prefix = pathutil.canonpath(backend.repo.root, backend.repo.getcwd(),
                                    prefix)
        if prefix != '':
            prefix += '/'
    def pstrip(p):
        # strip - 1: pathtransform counts separators, patch -p counts dirs
        return pathtransform(p, strip - 1, prefix)[1]

    rejects = 0
    err = 0
    current_file = None

    for state, values in iterhunks(fp):
        if state == 'hunk':
            if not current_file:
                # file selection failed earlier; skip its hunks
                continue
            ret = current_file.apply(values)
            if ret > 0:
                err = 1
        elif state == 'file':
            # close the previous target before switching files
            if current_file:
                rejects += current_file.close()
                current_file = None
            afile, bfile, first_hunk, gp = values
            if gp:
                gp.path = pstrip(gp.path)
                if gp.oldpath:
                    gp.oldpath = pstrip(gp.oldpath)
            else:
                # plain patch: synthesize metadata from the file names
                gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
                                   prefix)
            if gp.op == 'RENAME':
                backend.unlink(gp.oldpath)
            if not first_hunk:
                # metadata-only change (add/delete/copy/rename/mode)
                if gp.op == 'DELETE':
                    backend.unlink(gp.path)
                    continue
                data, mode = None, None
                if gp.op in ('RENAME', 'COPY'):
                    data, mode = store.getfile(gp.oldpath)[:2]
                    # FIXME: failing getfile has never been handled here
                    assert data is not None
                if gp.mode:
                    mode = gp.mode
                    if gp.op == 'ADD':
                        # Added files without content have no hunk and
                        # must be created
                        data = ''
                if data or mode:
                    if (gp.op in ('ADD', 'RENAME', 'COPY')
                        and backend.exists(gp.path)):
                        raise PatchError(_("cannot create %s: destination "
                                           "already exists") % gp.path)
                    backend.setfile(gp.path, data, mode, gp.oldpath)
                continue
            try:
                current_file = patcher(ui, gp, backend, store,
                                       eolmode=eolmode)
            except PatchError, inst:
                ui.warn(str(inst) + '\n')
                current_file = None
                rejects += 1
                continue
        elif state == 'git':
            # stash copy/rename sources before they are modified in place
            for gp in values:
                path = pstrip(gp.oldpath)
                data, mode = backend.getfile(path)
                if data is None:
                    # The error ignored here will trigger a getfile()
                    # error in a place more appropriate for error
                    # handling, and will not interrupt the patching
                    # process.
                    pass
                else:
                    store.setfile(path, data, mode)
        else:
            raise util.Abort(_('unsupported parser state: %s') % state)

    if current_file:
        rejects += current_file.close()

    if rejects:
        return -1
    return err
1896 1896
def _externalpatch(ui, repo, patcher, patchname, strip, files,
                   similarity):
    """use <patcher> to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor.

    *files* is updated in place with the names of patched files, and
    marktouched() is invoked on them even if the patch command fails.
    """

    fuzz = False
    args = []
    # pf/printed_file used to be bound only inside the 'patching file'
    # branch; a 'with fuzz'/'FAILED' line arriving before any
    # 'patching file' line raised UnboundLocalError. Initialize up front.
    pf = None
    printed_file = False
    cwd = repo.root
    if cwd:
        args.append('-d %s' % util.shellquote(cwd))
    fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
                                         util.shellquote(patchname)))
    try:
        for line in fp:
            line = line.rstrip()
            ui.note(line + '\n')
            if line.startswith('patching file '):
                pf = util.parsepatchoutput(line)
                printed_file = False
                files.add(pf)
            elif line.find('with fuzz') >= 0:
                fuzz = True
                # print the file name once before its first warning
                if not printed_file and pf is not None:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
            elif line.find('saving rejects to file') >= 0:
                ui.warn(line + '\n')
            elif line.find('FAILED') >= 0:
                if not printed_file and pf is not None:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
    finally:
        # mark touched files even on failure so the dirstate stays sane
        if files:
            scmutil.marktouched(repo, files, similarity)
        code = fp.close()
        if code:
            raise PatchError(_("patch command failed: %s") %
                             util.explainexit(code)[0])
    return fuzz
1938 1938
def patchbackend(ui, backend, patchobj, strip, prefix, files=None,
                 eolmode='strict'):
    """Apply patchobj through the given backend.

    patchobj may be a file path or an already-open file-like object.
    files, if given, is updated in place with the names the backend
    touched. Returns whether the patch was applied with fuzz factor
    (applydiff status > 0); raises PatchError if it failed to apply.
    """
    if files is None:
        files = set()
    if eolmode is None:
        # fall back to the user's configured EOL handling
        eolmode = ui.config('patch', 'eol', 'strict')
    if eolmode.lower() not in eolmodes:
        raise util.Abort(_('unsupported line endings type: %s') % eolmode)
    eolmode = eolmode.lower()

    store = filestore()
    try:
        # duck-type patchobj: open() raises TypeError when it is already
        # a file-like object instead of a path
        fp = open(patchobj, 'rb')
    except TypeError:
        fp = patchobj
    try:
        ret = applydiff(ui, fp, backend, store, strip=strip, prefix=prefix,
                        eolmode=eolmode)
    finally:
        # only close the file if we opened it ourselves; always flush
        # the backend (collecting touched files) and the temp store
        if fp != patchobj:
            fp.close()
        files.update(backend.close())
        store.close()
    if ret < 0:
        raise PatchError(_('patch failed to apply'))
    return ret > 0
1965 1965
def internalpatch(ui, repo, patchobj, strip, prefix='', files=None,
                  eolmode='strict', similarity=0):
    """Apply <patchobj> to the working directory using the builtin patcher.

    Returns whether the patch was applied with fuzz factor.
    """
    wbackend = workingbackend(ui, repo, similarity)
    return patchbackend(ui, wbackend, patchobj, strip, prefix, files,
                        eolmode)
1972 1972
def patchrepo(ui, repo, ctx, store, patchobj, strip, prefix, files=None,
              eolmode='strict'):
    """Like patchbackend(), but against a repo backend built from ctx/store."""
    rbackend = repobackend(ui, repo, ctx, store)
    return patchbackend(ui, rbackend, patchobj, strip, prefix, files,
                        eolmode)
1977 1977
def patch(ui, repo, patchname, strip=1, prefix='', files=None, eolmode='strict',
          similarity=0):
    """Apply <patchname> to the working directory.

    'eolmode' specifies how end of lines should be handled. It can be:
    - 'strict': inputs are read in binary mode, EOLs are preserved
    - 'crlf': EOLs are ignored when patching and reset to CRLF
    - 'lf': EOLs are ignored when patching and reset to LF
    - None: get it from user settings, default to 'strict'
    'eolmode' is ignored when using an external patcher program.

    Returns whether patch was applied with fuzz factor.
    """
    if files is None:
        files = set()
    # an external patch program configured in [ui] takes precedence over
    # the builtin patcher
    patcher = ui.config('ui', 'patch')
    if not patcher:
        return internalpatch(ui, repo, patchname, strip, prefix, files,
                             eolmode, similarity)
    return _externalpatch(ui, repo, patcher, patchname, strip, files,
                          similarity)
1999 1999
def changedfiles(ui, repo, patchpath, strip=1):
    """Return the set of file paths touched by the patch at patchpath."""
    backend = fsbackend(ui, repo.root)
    changed = set()
    fp = open(patchpath, 'rb')
    try:
        for state, values in iterhunks(fp):
            if state not in ('file', 'hunk', 'git'):
                raise util.Abort(_('unsupported parser state: %s') % state)
            if state != 'file':
                # only 'file' records contribute names here
                continue
            afile, bfile, firsthunk, meta = values
            if meta:
                meta.path = pathtransform(meta.path, strip - 1, '')[1]
                if meta.oldpath:
                    meta.oldpath = pathtransform(meta.oldpath,
                                                 strip - 1, '')[1]
            else:
                meta = makepatchmeta(backend, afile, bfile, firsthunk,
                                     strip, '')
            changed.add(meta.path)
            if meta.op == 'RENAME':
                changed.add(meta.oldpath)
        return changed
    finally:
        fp.close()
2023 2023
class GitDiffRequired(Exception):
    """Raised when a diff would lose data unless regenerated in git format.

    Used internally by diff(): its losedata callback raises this to
    restart generation with git-style patches.
    """
    pass
2026 2026
def diffallopts(ui, opts=None, untrusted=False, section='diff'):
    '''Return diffopts with every feature group enabled and parsed.'''
    return difffeatureopts(ui, opts=opts, untrusted=untrusted,
                           section=section, git=True, whitespace=True,
                           formatchanging=True)
2031 2031
# 'diffopts' is an alias of diffallopts
diffopts = diffallopts
2033 2033
def difffeatureopts(ui, opts=None, untrusted=False, section='diff', git=False,
                    whitespace=False, formatchanging=False):
    '''return diffopts with only opted-in features parsed

    Features:
    - git: git-style diffs
    - whitespace: whitespace options like ignoreblanklines and ignorews
    - formatchanging: options that will likely break or cause correctness issues
      with most diff parsers
    '''
    def get(key, name=None, getter=ui.configbool, forceplain=None):
        # precedence: explicit command-line opts first, then the forced
        # value when ui.plain() is in effect (if one was requested),
        # finally the config file
        if opts:
            v = opts.get(key)
            if v:
                return v
        if forceplain is not None and ui.plain():
            return forceplain
        return getter(section, name or key, None, untrusted=untrusted)

    # core options, expected to be understood by every diff parser
    buildopts = {
        'nodates': get('nodates'),
        'showfunc': get('show_function', 'showfunc'),
        'context': get('unified', getter=ui.config),
    }

    if git:
        buildopts['git'] = get('git')
    if whitespace:
        buildopts['ignorews'] = get('ignore_all_space', 'ignorews')
        buildopts['ignorewsamount'] = get('ignore_space_change',
                                          'ignorewsamount')
        buildopts['ignoreblanklines'] = get('ignore_blank_lines',
                                            'ignoreblanklines')
    if formatchanging:
        # 'text' has no config counterpart; it only comes from opts
        buildopts['text'] = opts and opts.get('text')
        buildopts['nobinary'] = get('nobinary')
        # forced off under plain mode so scripted consumers keep the
        # conventional a/ b/ prefixes
        buildopts['noprefix'] = get('noprefix', forceplain=False)

    return mdiff.diffopts(**buildopts)
2074 2074
def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None,
         losedatafn=None, prefix='', relroot=''):
    '''yields diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.

    losedatafn(**kwarg) is a callable run when opts.upgrade=True and
    every time some change cannot be represented with the current
    patch format. Return False to upgrade to git patch format, True to
    accept the loss or raise an exception to abort the diff. It is
    called with the name of current file being diffed as 'fn'. If set
    to None, patches will always be upgraded to git format when
    necessary.

    prefix is a filename prefix that is prepended to all filenames on
    display (used for subrepos).

    relroot, if not empty, must be normalized with a trailing /. Any match
    patterns that fall outside it will be ignored.'''

    if opts is None:
        opts = mdiff.defaultopts

    if not node1 and not node2:
        node1 = repo.dirstate.p1()

    def lrugetfilectx():
        # small LRU cache of filelogs so that looking up the same file on
        # both sides of the diff reuses one filelog instead of reopening it
        cache = {}
        order = collections.deque()
        def getfilectx(f, ctx):
            fctx = ctx.filectx(f, filelog=cache.get(f))
            if f not in cache:
                if len(cache) > 20:
                    # evict the least recently used entry
                    del cache[order.popleft()]
                cache[f] = fctx.filelog()
            else:
                # O(len(order)) remove, but the cache is bounded at ~20
                order.remove(f)
            order.append(f)
            return fctx
        return getfilectx
    getfilectx = lrugetfilectx()

    ctx1 = repo[node1]
    ctx2 = repo[node2]

    relfiltered = False
    if relroot != '' and match.always():
        # as a special case, create a new matcher with just the relroot
        pats = [relroot]
        match = scmutil.match(ctx2, pats, default='path')
        relfiltered = True

    if not changes:
        changes = repo.status(ctx1, ctx2, match=match)
    modified, added, removed = changes[:3]

    if not modified and not added and not removed:
        return []

    if repo.ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]

    # copy/rename detection only matters for git diffs or when we may
    # upgrade to one
    copy = {}
    if opts.git or opts.upgrade:
        copy = copies.pathcopies(ctx1, ctx2, match=match)

    if relroot is not None:
        if not relfiltered:
            # XXX this would ideally be done in the matcher, but that is
            # generally meant to 'or' patterns, not 'and' them. In this case we
            # need to 'and' all the patterns from the matcher with relroot.
            def filterrel(l):
                return [f for f in l if f.startswith(relroot)]
            modified = filterrel(modified)
            added = filterrel(added)
            removed = filterrel(removed)
            relfiltered = True
        # filter out copies where either side isn't inside the relative root
        copy = dict(((dst, src) for (dst, src) in copy.iteritems()
                     if dst.startswith(relroot)
                     and src.startswith(relroot)))

    def difffn(opts, losedata):
        return trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
                       copy, getfilectx, opts, losedata, prefix, relroot)
    if opts.upgrade and not opts.git:
        try:
            def losedata(fn):
                # a falsy losedatafn result means: switch to git format
                if not losedatafn or not losedatafn(fn=fn):
                    raise GitDiffRequired
            # Buffer the whole output until we are sure it can be generated
            return list(difffn(opts.copy(git=False), losedata))
        except GitDiffRequired:
            # lossless plain diff is impossible; redo everything as git
            return difffn(opts.copy(git=True), None)
    else:
        return difffn(opts, None)
2176 2176
def difflabel(func, *args, **kw):
    '''yields 2-tuples of (output, label) based on the output of func()'''
    # labels for lines belonging to a file header block
    headprefixes = [('diff', 'diff.diffline'),
                    ('copy', 'diff.extended'),
                    ('rename', 'diff.extended'),
                    ('old', 'diff.extended'),
                    ('new', 'diff.extended'),
                    ('deleted', 'diff.extended'),
                    ('---', 'diff.file_a'),
                    ('+++', 'diff.file_b')]
    # labels for lines inside a hunk
    textprefixes = [('@', 'diff.hunk'),
                    ('-', 'diff.deleted'),
                    ('+', 'diff.inserted')]
    # 'head' tracks whether we are inside a file header block: a '@' hunk
    # line closes it; a line that cannot be hunk content opens a new one
    head = False
    for chunk in func(*args, **kw):
        lines = chunk.split('\n')
        for i, line in enumerate(lines):
            if i != 0:
                # re-emit the newlines that split() removed
                yield ('\n', '')
            if head:
                if line.startswith('@'):
                    head = False
            else:
                if line and line[0] not in ' +-@\\':
                    head = True
            stripline = line
            diffline = False
            if not head and line and line[0] in '+-':
                # highlight tabs and trailing whitespace, but only in
                # changed lines
                stripline = line.rstrip()
                diffline = True

            prefixes = textprefixes
            if head:
                prefixes = headprefixes
            for prefix, label in prefixes:
                if stripline.startswith(prefix):
                    if diffline:
                        # split out tabs so they can be labeled separately
                        for token in tabsplitter.findall(stripline):
                            if '\t' == token[0]:
                                yield (token, 'diff.tab')
                            else:
                                yield (token, label)
                    else:
                        yield (stripline, label)
                    break
            else:
                yield (line, '')
            if line != stripline:
                # emit the stripped-off trailing whitespace, highlighted
                yield (line[len(stripline):], 'diff.trailingwhitespace')
2228 2228
def diffui(*args, **kw):
    '''like diff(), but yields 2-tuples of (output, label) for ui.write()

    All positional and keyword arguments are forwarded to diff().
    '''
    return difflabel(diff, *args, **kw)
2232 2232
2233 2233 def _filepairs(ctx1, modified, added, removed, copy, opts):
2234 2234 '''generates tuples (f1, f2, copyop), where f1 is the name of the file
2235 2235 before and f2 is the the name after. For added files, f1 will be None,
2236 2236 and for removed files, f2 will be None. copyop may be set to None, 'copy'
2237 2237 or 'rename' (the latter two only if opts.git is set).'''
2238 2238 gone = set()
2239 2239
2240 2240 copyto = dict([(v, k) for k, v in copy.items()])
2241 2241
2242 2242 addedset, removedset = set(added), set(removed)
2243 2243 # Fix up added, since merged-in additions appear as
2244 2244 # modifications during merges
2245 2245 for f in modified:
2246 2246 if f not in ctx1:
2247 2247 addedset.add(f)
2248 2248
2249 2249 for f in sorted(modified + added + removed):
2250 2250 copyop = None
2251 2251 f1, f2 = f, f
2252 2252 if f in addedset:
2253 2253 f1 = None
2254 2254 if f in copy:
2255 2255 if opts.git:
2256 2256 f1 = copy[f]
2257 2257 if f1 in removedset and f1 not in gone:
2258 2258 copyop = 'rename'
2259 2259 gone.add(f1)
2260 2260 else:
2261 2261 copyop = 'copy'
2262 2262 elif f in removedset:
2263 2263 f2 = None
2264 2264 if opts.git:
2265 2265 # have we already reported a copy above?
2266 2266 if (f in copyto and copyto[f] in addedset
2267 2267 and copy[copyto[f]] == f):
2268 2268 continue
2269 2269 yield f1, f2, copyop
2270 2270
def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
            copy, getfilectx, opts, losedatafn, prefix, relroot):
    '''given input data, generate a diff and yield it in blocks

    If generating a diff would lose data like flags or binary data and
    losedatafn is not None, it will be called.

    relroot is removed and prefix is added to every path in the diff output.

    If relroot is not empty, this function expects every path in modified,
    added, removed and copy to start with it.'''

    def gitindex(text):
        # hash the content the way git hashes a blob
        # ('blob <len>\0<data>'), for the 'index' line of git diffs
        if not text:
            text = ""
        l = len(text)
        s = util.sha1('blob %d\0' % l)
        s.update(text)
        return s.hexdigest()

    if opts.noprefix:
        aprefix = bprefix = ''
    else:
        aprefix = 'a/'
        bprefix = 'b/'

    def diffline(f, revs):
        # traditional (non-git) header line: 'diff -r REV [-r REV] file'
        revinfo = ' '.join(["-r %s" % rev for rev in revs])
        return 'diff %s %s' % (revinfo, f)

    date1 = util.datestr(ctx1.date())
    date2 = util.datestr(ctx2.date())

    gitmode = {'l': '120000', 'x': '100755', '': '100644'}

    # developer sanity check: callers promised that every path starts
    # with relroot when relroot is non-empty
    if relroot != '' and (repo.ui.configbool('devel', 'all')
                          or repo.ui.configbool('devel', 'check-relroot')):
        for f in modified + added + removed + copy.keys() + copy.values():
            if f is not None and not f.startswith(relroot):
                raise AssertionError(
                    "file %s doesn't start with relroot %s" % (f, relroot))

    for f1, f2, copyop in _filepairs(
            ctx1, modified, added, removed, copy, opts):
        content1 = None
        content2 = None
        flag1 = None
        flag2 = None
        if f1:
            content1 = getfilectx(f1, ctx1).data()
            if opts.git or losedatafn:
                flag1 = ctx1.flags(f1)
        if f2:
            content2 = getfilectx(f2, ctx2).data()
            if opts.git or losedatafn:
                flag2 = ctx2.flags(f2)
        binary = False
        if opts.git or losedatafn:
            binary = util.binary(content1) or util.binary(content2)

        if losedatafn and not opts.git:
            # give the caller a chance to veto (or accept losing) anything
            # a plain patch cannot represent
            if (binary or
                # copy/rename
                f2 in copy or
                # empty file creation
                (not f1 and not content2) or
                # empty file deletion
                (not content1 and not f2) or
                # create with flags
                (not f1 and flag2) or
                # change flags
                (f1 and f2 and flag1 != flag2)):
                losedatafn(f2 or f1)

        # strip relroot, then graft the requested display prefix on
        path1 = f1 or f2
        path2 = f2 or f1
        path1 = posixpath.join(prefix, path1[len(relroot):])
        path2 = posixpath.join(prefix, path2[len(relroot):])
        header = []
        if opts.git:
            header.append('diff --git %s%s %s%s' %
                          (aprefix, path1, bprefix, path2))
            if not f1: # added
                header.append('new file mode %s' % gitmode[flag2])
            elif not f2: # removed
                header.append('deleted file mode %s' % gitmode[flag1])
            else: # modified/copied/renamed
                mode1, mode2 = gitmode[flag1], gitmode[flag2]
                if mode1 != mode2:
                    header.append('old mode %s' % mode1)
                    header.append('new mode %s' % mode2)
                if copyop is not None:
                    header.append('%s from %s' % (copyop, path1))
                    header.append('%s to %s' % (copyop, path2))
        elif revs and not repo.ui.quiet:
            header.append(diffline(path1, revs))

        if binary and opts.git and not opts.nobinary:
            text = mdiff.b85diff(content1, content2)
            if text:
                header.append('index %s..%s' %
                              (gitindex(content1), gitindex(content2)))
        else:
            text = mdiff.unidiff(content1, date1,
                                 content2, date2,
                                 path1, path2, opts=opts)
        # suppress a lone header with no content (nothing to show)
        if header and (text or len(header) > 1):
            yield '\n'.join(header) + '\n'
        if text:
            yield text
2381 2381
def diffstatsum(stats):
    """Aggregate per-file diffstat tuples.

    stats is an iterable of (filename, adds, removes, isbinary) tuples.
    Returns (maxfilewidth, maxtotal, addtotal, removetotal, hasbinary).
    """
    maxfile = 0
    maxtotal = 0
    addtotal = 0
    removetotal = 0
    binary = False
    for fname, adds, removes, isbinary in stats:
        width = encoding.colwidth(fname)
        if width > maxfile:
            maxfile = width
        total = adds + removes
        if total > maxtotal:
            maxtotal = total
        addtotal += adds
        removetotal += removes
        if isbinary:
            binary = True
    return maxfile, maxtotal, addtotal, removetotal, binary
2392 2392
def diffstatdata(lines):
    """Parse diff text into per-file stat tuples.

    Returns a list of (filename, adds, removes, isbinary) tuples, one
    per file in the diff. Both git-style ('diff --git a/...') and
    traditional ('diff -r ... filename') header lines are recognized.
    """
    # raw string so '\s' reaches the regex engine verbatim instead of
    # relying on Python leaving the unknown string escape intact
    diffre = re.compile(r'^diff .*-r [a-z0-9]+\s(.*)$')

    results = []
    filename, adds, removes, isbinary = None, 0, 0, False

    def addresult():
        # flush the counters of the file being scanned, if any
        if filename:
            results.append((filename, adds, removes, isbinary))

    for line in lines:
        if line.startswith('diff'):
            addresult()
            # set numbers to 0 anyway when starting new file
            adds, removes, isbinary = 0, 0, False
            if line.startswith('diff --git a/'):
                filename = gitre.search(line).group(2)
            elif line.startswith('diff -r'):
                # format: "diff -r ... -r ... filename"
                filename = diffre.search(line).group(1)
        elif line.startswith('+') and not line.startswith('+++ '):
            adds += 1
        elif line.startswith('-') and not line.startswith('--- '):
            removes += 1
        elif (line.startswith('GIT binary patch') or
              line.startswith('Binary file')):
            isbinary = True
    # flush the trailing file
    addresult()
    return results
2422 2422
def diffstat(lines, width=80, git=False):
    """Render a classic diffstat summary of the given diff lines."""
    stats = diffstatdata(lines)
    maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats)

    countwidth = len(str(maxtotal))
    if hasbinary and countwidth < 3:
        countwidth = 3
    graphwidth = max(width - countwidth - maxname - 6, 10)

    def scale(i):
        if maxtotal <= graphwidth:
            return i
        # If diffstat runs out of room it doesn't print anything,
        # which isn't very useful, so always print at least one + or -
        # if there were at least some changes.
        return max(i * graphwidth // maxtotal, int(bool(i)))

    output = []
    for filename, adds, removes, isbinary in stats:
        if isbinary:
            count = 'Bin'
        else:
            count = adds + removes
        padding = ' ' * (maxname - encoding.colwidth(filename))
        output.append(' %s%s | %*s %s%s\n'
                      % (filename, padding, countwidth, count,
                         '+' * scale(adds), '-' * scale(removes)))

    if stats:
        output.append(_(' %d files changed, %d insertions(+), '
                        '%d deletions(-)\n')
                      % (len(stats), totaladds, totalremoves))

    return ''.join(output)
2460 2460
def diffstatui(*args, **kw):
    '''Like diffstat(), but yield 2-tuples of (output, label) for
    ui.write()
    '''

    for line in diffstat(*args, **kw).splitlines():
        if not line or line[-1] not in '+-':
            yield (line, '')
        else:
            # a histogram line: emit the file name part unlabeled, then
            # the runs of '+' and '-' with their diffstat labels
            name, graph = line.rsplit(' ', 1)
            yield (name + ' ', '')
            plusses = re.search(r'\++', graph)
            if plusses:
                yield (plusses.group(0), 'diffstat.inserted')
            minuses = re.search(r'-+', graph)
            if minuses:
                yield (minuses.group(0), 'diffstat.deleted')
        yield ('\n', '')
General Comments 0
You need to be logged in to leave comments. Login now