patch: add within-line color diff capacity...
Matthieu Laneuville
r35278:6ba79cf3 default
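The hunk below is the cmdutil.py side of this change: the two call sites in
diffordiffstat() now pass the diff options to patch.diff() and patch.diffui()
as the keyword argument opts=diffopts rather than positionally (old/new line
1521 and 1529 in the hunk). A minimal sketch of the resulting call pattern,
with names borrowed from diffordiffstat() itself (not a standalone program):

    # sketch only -- repo, node1, node2, match, changes, diffopts, prefix,
    # relroot and write are assumed to be set up as in diffordiffstat()
    chunks = patch.diff(repo, node1, node2, match, changes, opts=diffopts,
                        prefix=prefix, relroot=relroot)
    for chunk, label in patch.diffui(repo, node1, node2, match, changes,
                                     opts=diffopts, prefix=prefix,
                                     relroot=relroot):
        write(chunk, label=label)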
@@ -1,3972 +1,3972 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import itertools
12 12 import os
13 13 import re
14 14 import tempfile
15 15
16 16 from .i18n import _
17 17 from .node import (
18 18 hex,
19 19 nullid,
20 20 nullrev,
21 21 short,
22 22 )
23 23
24 24 from . import (
25 25 bookmarks,
26 26 changelog,
27 27 copies,
28 28 crecord as crecordmod,
29 29 dagop,
30 30 dirstateguard,
31 31 encoding,
32 32 error,
33 33 formatter,
34 34 graphmod,
35 35 match as matchmod,
36 36 mdiff,
37 37 obsolete,
38 38 patch,
39 39 pathutil,
40 40 pycompat,
41 41 registrar,
42 42 revlog,
43 43 revset,
44 44 scmutil,
45 45 smartset,
46 46 templatekw,
47 47 templater,
48 48 util,
49 49 vfs as vfsmod,
50 50 )
51 51 stringio = util.stringio
52 52
53 53 # templates of common command options
54 54
55 55 dryrunopts = [
56 56 ('n', 'dry-run', None,
57 57 _('do not perform actions, just print output')),
58 58 ]
59 59
60 60 remoteopts = [
61 61 ('e', 'ssh', '',
62 62 _('specify ssh command to use'), _('CMD')),
63 63 ('', 'remotecmd', '',
64 64 _('specify hg command to run on the remote side'), _('CMD')),
65 65 ('', 'insecure', None,
66 66 _('do not verify server certificate (ignoring web.cacerts config)')),
67 67 ]
68 68
69 69 walkopts = [
70 70 ('I', 'include', [],
71 71 _('include names matching the given patterns'), _('PATTERN')),
72 72 ('X', 'exclude', [],
73 73 _('exclude names matching the given patterns'), _('PATTERN')),
74 74 ]
75 75
76 76 commitopts = [
77 77 ('m', 'message', '',
78 78 _('use text as commit message'), _('TEXT')),
79 79 ('l', 'logfile', '',
80 80 _('read commit message from file'), _('FILE')),
81 81 ]
82 82
83 83 commitopts2 = [
84 84 ('d', 'date', '',
85 85 _('record the specified date as commit date'), _('DATE')),
86 86 ('u', 'user', '',
87 87 _('record the specified user as committer'), _('USER')),
88 88 ]
89 89
90 90 # hidden for now
91 91 formatteropts = [
92 92 ('T', 'template', '',
93 93 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
94 94 ]
95 95
96 96 templateopts = [
97 97 ('', 'style', '',
98 98 _('display using template map file (DEPRECATED)'), _('STYLE')),
99 99 ('T', 'template', '',
100 100 _('display with template'), _('TEMPLATE')),
101 101 ]
102 102
103 103 logopts = [
104 104 ('p', 'patch', None, _('show patch')),
105 105 ('g', 'git', None, _('use git extended diff format')),
106 106 ('l', 'limit', '',
107 107 _('limit number of changes displayed'), _('NUM')),
108 108 ('M', 'no-merges', None, _('do not show merges')),
109 109 ('', 'stat', None, _('output diffstat-style summary of changes')),
110 110 ('G', 'graph', None, _("show the revision DAG")),
111 111 ] + templateopts
112 112
113 113 diffopts = [
114 114 ('a', 'text', None, _('treat all files as text')),
115 115 ('g', 'git', None, _('use git extended diff format')),
116 116 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
117 117 ('', 'nodates', None, _('omit dates from diff headers'))
118 118 ]
119 119
120 120 diffwsopts = [
121 121 ('w', 'ignore-all-space', None,
122 122 _('ignore white space when comparing lines')),
123 123 ('b', 'ignore-space-change', None,
124 124 _('ignore changes in the amount of white space')),
125 125 ('B', 'ignore-blank-lines', None,
126 126 _('ignore changes whose lines are all blank')),
127 127 ('Z', 'ignore-space-at-eol', None,
128 128 _('ignore changes in whitespace at EOL')),
129 129 ]
130 130
131 131 diffopts2 = [
132 132 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
133 133 ('p', 'show-function', None, _('show which function each change is in')),
134 134 ('', 'reverse', None, _('produce a diff that undoes the changes')),
135 135 ] + diffwsopts + [
136 136 ('U', 'unified', '',
137 137 _('number of lines of context to show'), _('NUM')),
138 138 ('', 'stat', None, _('output diffstat-style summary of changes')),
139 139 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
140 140 ]
141 141
142 142 mergetoolopts = [
143 143 ('t', 'tool', '', _('specify merge tool')),
144 144 ]
145 145
146 146 similarityopts = [
147 147 ('s', 'similarity', '',
148 148 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
149 149 ]
150 150
151 151 subrepoopts = [
152 152 ('S', 'subrepos', None,
153 153 _('recurse into subrepositories'))
154 154 ]
155 155
156 156 debugrevlogopts = [
157 157 ('c', 'changelog', False, _('open changelog')),
158 158 ('m', 'manifest', False, _('open manifest')),
159 159 ('', 'dir', '', _('open directory manifest')),
160 160 ]
161 161
162 162 # special string such that everything below this line will be ignored in the
163 163 # editor text
164 164 _linebelow = "^HG: ------------------------ >8 ------------------------$"
165 165
166 166 def ishunk(x):
167 167 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
168 168 return isinstance(x, hunkclasses)
169 169
170 170 def newandmodified(chunks, originalchunks):
171 171 newlyaddedandmodifiedfiles = set()
172 172 for chunk in chunks:
173 173 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
174 174 originalchunks:
175 175 newlyaddedandmodifiedfiles.add(chunk.header.filename())
176 176 return newlyaddedandmodifiedfiles
177 177
178 178 def parsealiases(cmd):
179 179 return cmd.lstrip("^").split("|")
180 180
181 181 def setupwrapcolorwrite(ui):
182 182 # wrap ui.write so diff output can be labeled/colorized
183 183 def wrapwrite(orig, *args, **kw):
184 184 label = kw.pop('label', '')
185 185 for chunk, l in patch.difflabel(lambda: args):
186 186 orig(chunk, label=label + l)
187 187
188 188 oldwrite = ui.write
189 189 def wrap(*args, **kwargs):
190 190 return wrapwrite(oldwrite, *args, **kwargs)
191 191 setattr(ui, 'write', wrap)
192 192 return oldwrite
193 193
194 194 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
195 195 if usecurses:
196 196 if testfile:
197 197 recordfn = crecordmod.testdecorator(testfile,
198 198 crecordmod.testchunkselector)
199 199 else:
200 200 recordfn = crecordmod.chunkselector
201 201
202 202 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
203 203
204 204 else:
205 205 return patch.filterpatch(ui, originalhunks, operation)
206 206
207 207 def recordfilter(ui, originalhunks, operation=None):
208 208     """ Prompt the user to filter the originalhunks and return a list of
209 209     selected hunks.
210 210     *operation* is used to build ui messages to indicate to the user what
211 211     kind of filtering they are doing: reverting, committing, shelving, etc.
212 212     (see patch.filterpatch).
213 213 """
214 214 usecurses = crecordmod.checkcurses(ui)
215 215 testfile = ui.config('experimental', 'crecordtest')
216 216 oldwrite = setupwrapcolorwrite(ui)
217 217 try:
218 218 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
219 219 testfile, operation)
220 220 finally:
221 221 ui.write = oldwrite
222 222 return newchunks, newopts
223 223
224 224 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
225 225 filterfn, *pats, **opts):
226 226 from . import merge as mergemod
227 227 opts = pycompat.byteskwargs(opts)
228 228 if not ui.interactive():
229 229 if cmdsuggest:
230 230 msg = _('running non-interactively, use %s instead') % cmdsuggest
231 231 else:
232 232 msg = _('running non-interactively')
233 233 raise error.Abort(msg)
234 234
235 235 # make sure username is set before going interactive
236 236 if not opts.get('user'):
237 237 ui.username() # raise exception, username not provided
238 238
239 239 def recordfunc(ui, repo, message, match, opts):
240 240         """This is the generic record driver.
241 241
242 242         Its job is to interactively filter local changes, and
243 243         accordingly prepare the working directory into a state in which the
244 244         job can be delegated to a non-interactive commit command such as
245 245         'commit' or 'qrefresh'.
246 246
247 247         After the actual job is done by the non-interactive command, the
248 248 working directory is restored to its original state.
249 249
250 250 In the end we'll record interesting changes, and everything else
251 251 will be left in place, so the user can continue working.
252 252 """
253 253
254 254 checkunfinished(repo, commit=True)
255 255 wctx = repo[None]
256 256 merge = len(wctx.parents()) > 1
257 257 if merge:
258 258 raise error.Abort(_('cannot partially commit a merge '
259 259 '(use "hg commit" instead)'))
260 260
261 261 def fail(f, msg):
262 262 raise error.Abort('%s: %s' % (f, msg))
263 263
264 264 force = opts.get('force')
265 265 if not force:
266 266 vdirs = []
267 267 match.explicitdir = vdirs.append
268 268 match.bad = fail
269 269
270 270 status = repo.status(match=match)
271 271 if not force:
272 272 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
273 273 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
274 274 diffopts.nodates = True
275 275 diffopts.git = True
276 276 diffopts.showfunc = True
277 277 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
278 278 originalchunks = patch.parsepatch(originaldiff)
279 279
280 280         # 1. filter patch, since we are intending to apply a subset of it
281 281 try:
282 282 chunks, newopts = filterfn(ui, originalchunks)
283 283 except error.PatchError as err:
284 284 raise error.Abort(_('error parsing patch: %s') % err)
285 285 opts.update(newopts)
286 286
287 287 # We need to keep a backup of files that have been newly added and
288 288 # modified during the recording process because there is a previous
289 289 # version without the edit in the workdir
290 290 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
291 291 contenders = set()
292 292 for h in chunks:
293 293 try:
294 294 contenders.update(set(h.files()))
295 295 except AttributeError:
296 296 pass
297 297
298 298 changed = status.modified + status.added + status.removed
299 299 newfiles = [f for f in changed if f in contenders]
300 300 if not newfiles:
301 301 ui.status(_('no changes to record\n'))
302 302 return 0
303 303
304 304 modified = set(status.modified)
305 305
306 306 # 2. backup changed files, so we can restore them in the end
307 307
308 308 if backupall:
309 309 tobackup = changed
310 310 else:
311 311 tobackup = [f for f in newfiles if f in modified or f in \
312 312 newlyaddedandmodifiedfiles]
313 313 backups = {}
314 314 if tobackup:
315 315 backupdir = repo.vfs.join('record-backups')
316 316 try:
317 317 os.mkdir(backupdir)
318 318 except OSError as err:
319 319 if err.errno != errno.EEXIST:
320 320 raise
321 321 try:
322 322 # backup continues
323 323 for f in tobackup:
324 324 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
325 325 dir=backupdir)
326 326 os.close(fd)
327 327 ui.debug('backup %r as %r\n' % (f, tmpname))
328 328 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
329 329 backups[f] = tmpname
330 330
331 331 fp = stringio()
332 332 for c in chunks:
333 333 fname = c.filename()
334 334 if fname in backups:
335 335 c.write(fp)
336 336 dopatch = fp.tell()
337 337 fp.seek(0)
338 338
339 339 # 2.5 optionally review / modify patch in text editor
340 340 if opts.get('review', False):
341 341 patchtext = (crecordmod.diffhelptext
342 342 + crecordmod.patchhelptext
343 343 + fp.read())
344 344 reviewedpatch = ui.edit(patchtext, "",
345 345 action="diff",
346 346 repopath=repo.path)
347 347 fp.truncate(0)
348 348 fp.write(reviewedpatch)
349 349 fp.seek(0)
350 350
351 351 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
352 352 # 3a. apply filtered patch to clean repo (clean)
353 353 if backups:
354 354 # Equivalent to hg.revert
355 355 m = scmutil.matchfiles(repo, backups.keys())
356 356 mergemod.update(repo, repo.dirstate.p1(),
357 357 False, True, matcher=m)
358 358
359 359 # 3b. (apply)
360 360 if dopatch:
361 361 try:
362 362 ui.debug('applying patch\n')
363 363 ui.debug(fp.getvalue())
364 364 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
365 365 except error.PatchError as err:
366 366 raise error.Abort(str(err))
367 367 del fp
368 368
369 369             # 4. We prepared the working directory according to the filtered
370 370 # patch. Now is the time to delegate the job to
371 371 # commit/qrefresh or the like!
372 372
373 373 # Make all of the pathnames absolute.
374 374 newfiles = [repo.wjoin(nf) for nf in newfiles]
375 375 return commitfunc(ui, repo, *newfiles, **opts)
376 376 finally:
377 377 # 5. finally restore backed-up files
378 378 try:
379 379 dirstate = repo.dirstate
380 380 for realname, tmpname in backups.iteritems():
381 381 ui.debug('restoring %r to %r\n' % (tmpname, realname))
382 382
383 383 if dirstate[realname] == 'n':
384 384 # without normallookup, restoring timestamp
385 385 # may cause partially committed files
386 386 # to be treated as unmodified
387 387 dirstate.normallookup(realname)
388 388
389 389 # copystat=True here and above are a hack to trick any
390 390             # editors that have f open into thinking we haven't modified them.
391 391 #
392 392             # Also note that this is racy as an editor could notice the
393 393 # file's mtime before we've finished writing it.
394 394 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
395 395 os.unlink(tmpname)
396 396 if tobackup:
397 397 os.rmdir(backupdir)
398 398 except OSError:
399 399 pass
400 400
401 401 def recordinwlock(ui, repo, message, match, opts):
402 402 with repo.wlock():
403 403 return recordfunc(ui, repo, message, match, opts)
404 404
405 405 return commit(ui, repo, recordinwlock, pats, opts)
406 406
407 407 class dirnode(object):
408 408 """
409 409     Represent a directory in the user's working copy with information required for
410 410 the purpose of tersing its status.
411 411
412 412 path is the path to the directory
413 413
414 414 statuses is a set of statuses of all files in this directory (this includes
415 415 all the files in all the subdirectories too)
416 416
417 417     files is a list of files which are direct children of this directory
418 418
419 419     subdirs is a dictionary with the sub-directory name as the key and its own
420 420 dirnode object as the value
421 421 """
422 422
423 423 def __init__(self, dirpath):
424 424 self.path = dirpath
425 425 self.statuses = set([])
426 426 self.files = []
427 427 self.subdirs = {}
428 428
429 429 def _addfileindir(self, filename, status):
430 430 """Add a file in this directory as a direct child."""
431 431 self.files.append((filename, status))
432 432
433 433 def addfile(self, filename, status):
434 434 """
435 435 Add a file to this directory or to its direct parent directory.
436 436
437 437         If the file is not a direct child of this directory, we traverse to the
438 438         directory of which this file is a direct child and add the file
439 439 there.
440 440 """
441 441
442 442         # if the filename contains a path separator, it means it's not a direct
443 443 # child of this directory
444 444 if '/' in filename:
445 445 subdir, filep = filename.split('/', 1)
446 446
447 447             # does the dirnode object for subdir exist
448 448 if subdir not in self.subdirs:
449 449 subdirpath = os.path.join(self.path, subdir)
450 450 self.subdirs[subdir] = dirnode(subdirpath)
451 451
452 452 # try adding the file in subdir
453 453 self.subdirs[subdir].addfile(filep, status)
454 454
455 455 else:
456 456 self._addfileindir(filename, status)
457 457
458 458 if status not in self.statuses:
459 459 self.statuses.add(status)
460 460
461 461 def iterfilepaths(self):
462 462 """Yield (status, path) for files directly under this directory."""
463 463 for f, st in self.files:
464 464 yield st, os.path.join(self.path, f)
465 465
466 466 def tersewalk(self, terseargs):
467 467 """
468 468 Yield (status, path) obtained by processing the status of this
469 469 dirnode.
470 470
471 471         terseargs is the string of arguments passed by the user with the `--terse`
472 472 flag.
473 473
474 474 Following are the cases which can happen:
475 475
476 476 1) All the files in the directory (including all the files in its
477 477 subdirectories) share the same status and the user has asked us to terse
478 478 that status. -> yield (status, dirpath)
479 479
480 480         2) Otherwise, we do the following:
481 481
482 482 a) Yield (status, filepath) for all the files which are in this
483 483 directory (only the ones in this directory, not the subdirs)
484 484
485 485 b) Recurse the function on all the subdirectories of this
486 486 directory
487 487 """
488 488
489 489 if len(self.statuses) == 1:
490 490 onlyst = self.statuses.pop()
491 491
492 492 # Making sure we terse only when the status abbreviation is
493 493 # passed as terse argument
494 494 if onlyst in terseargs:
495 495 yield onlyst, self.path + pycompat.ossep
496 496 return
497 497
498 498 # add the files to status list
499 499 for st, fpath in self.iterfilepaths():
500 500 yield st, fpath
501 501
502 502 #recurse on the subdirs
503 503 for dirobj in self.subdirs.values():
504 504 for st, fpath in dirobj.tersewalk(terseargs):
505 505 yield st, fpath
506 506
507 507 def tersedir(statuslist, terseargs):
508 508 """
509 509     Terse the status if all the files in a directory share the same status.
510 510
511 511     statuslist is a scmutil.status() object which contains a list of files for
512 512     each status.
513 513     terseargs is the string passed by the user as the argument to the
514 514     `--terse` flag.
515 515
516 516     The function makes a tree of objects of the dirnode class, and at each node it
517 517 stores the information required to know whether we can terse a certain
518 518 directory or not.
519 519 """
520 520 # the order matters here as that is used to produce final list
521 521 allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')
522 522
523 523 # checking the argument validity
524 524 for s in pycompat.bytestr(terseargs):
525 525 if s not in allst:
526 526 raise error.Abort(_("'%s' not recognized") % s)
527 527
528 528 # creating a dirnode object for the root of the repo
529 529 rootobj = dirnode('')
530 530 pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
531 531 'ignored', 'removed')
532 532
533 533 tersedict = {}
534 534 for attrname in pstatus:
535 535 statuschar = attrname[0:1]
536 536 for f in getattr(statuslist, attrname):
537 537 rootobj.addfile(f, statuschar)
538 538 tersedict[statuschar] = []
539 539
540 540 # we won't be tersing the root dir, so add files in it
541 541 for st, fpath in rootobj.iterfilepaths():
542 542 tersedict[st].append(fpath)
543 543
544 544 # process each sub-directory and build tersedict
545 545 for subdir in rootobj.subdirs.values():
546 546 for st, f in subdir.tersewalk(terseargs):
547 547 tersedict[st].append(f)
548 548
549 549 tersedlist = []
550 550 for st in allst:
551 551 tersedict[st].sort()
552 552 tersedlist.append(tersedict[st])
553 553
554 554 return tersedlist
555 555
556 556 def _commentlines(raw):
557 557     '''Surround lines with a comment char and a new line'''
558 558 lines = raw.splitlines()
559 559 commentedlines = ['# %s' % line for line in lines]
560 560 return '\n'.join(commentedlines) + '\n'
561 561
562 562 def _conflictsmsg(repo):
563 563 # avoid merge cycle
564 564 from . import merge as mergemod
565 565 mergestate = mergemod.mergestate.read(repo)
566 566 if not mergestate.active():
567 567 return
568 568
569 569 m = scmutil.match(repo[None])
570 570 unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
571 571 if unresolvedlist:
572 572 mergeliststr = '\n'.join(
573 573 [' %s' % util.pathto(repo.root, pycompat.getcwd(), path)
574 574 for path in unresolvedlist])
575 575 msg = _('''Unresolved merge conflicts:
576 576
577 577 %s
578 578
579 579 To mark files as resolved: hg resolve --mark FILE''') % mergeliststr
580 580 else:
581 581 msg = _('No unresolved merge conflicts.')
582 582
583 583 return _commentlines(msg)
584 584
585 585 def _helpmessage(continuecmd, abortcmd):
586 586 msg = _('To continue: %s\n'
587 587 'To abort: %s') % (continuecmd, abortcmd)
588 588 return _commentlines(msg)
589 589
590 590 def _rebasemsg():
591 591 return _helpmessage('hg rebase --continue', 'hg rebase --abort')
592 592
593 593 def _histeditmsg():
594 594 return _helpmessage('hg histedit --continue', 'hg histedit --abort')
595 595
596 596 def _unshelvemsg():
597 597 return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')
598 598
599 599 def _updatecleanmsg(dest=None):
600 600 warning = _('warning: this will discard uncommitted changes')
601 601 return 'hg update --clean %s (%s)' % (dest or '.', warning)
602 602
603 603 def _graftmsg():
604 604 # tweakdefaults requires `update` to have a rev hence the `.`
605 605 return _helpmessage('hg graft --continue', _updatecleanmsg())
606 606
607 607 def _mergemsg():
608 608 # tweakdefaults requires `update` to have a rev hence the `.`
609 609 return _helpmessage('hg commit', _updatecleanmsg())
610 610
611 611 def _bisectmsg():
612 612 msg = _('To mark the changeset good: hg bisect --good\n'
613 613 'To mark the changeset bad: hg bisect --bad\n'
614 614 'To abort: hg bisect --reset\n')
615 615 return _commentlines(msg)
616 616
617 617 def fileexistspredicate(filename):
618 618 return lambda repo: repo.vfs.exists(filename)
619 619
620 620 def _mergepredicate(repo):
621 621 return len(repo[None].parents()) > 1
622 622
623 623 STATES = (
624 624 # (state, predicate to detect states, helpful message function)
625 625 ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
626 626 ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
627 627 ('graft', fileexistspredicate('graftstate'), _graftmsg),
628 628 ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
629 629 ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
630 630 # The merge state is part of a list that will be iterated over.
631 631 # They need to be last because some of the other unfinished states may also
632 632 # be in a merge or update state (eg. rebase, histedit, graft, etc).
633 633 # We want those to have priority.
634 634 ('merge', _mergepredicate, _mergemsg),
635 635 )
636 636
637 637 def _getrepostate(repo):
638 638 # experimental config: commands.status.skipstates
639 639 skip = set(repo.ui.configlist('commands', 'status.skipstates'))
640 640 for state, statedetectionpredicate, msgfn in STATES:
641 641 if state in skip:
642 642 continue
643 643 if statedetectionpredicate(repo):
644 644 return (state, statedetectionpredicate, msgfn)
645 645
646 646 def morestatus(repo, fm):
647 647 statetuple = _getrepostate(repo)
648 648 label = 'status.morestatus'
649 649 if statetuple:
650 650 fm.startitem()
651 651 state, statedetectionpredicate, helpfulmsg = statetuple
652 652 statemsg = _('The repository is in an unfinished *%s* state.') % state
653 653 fm.write('statemsg', '%s\n', _commentlines(statemsg), label=label)
654 654 conmsg = _conflictsmsg(repo)
655 655 if conmsg:
656 656 fm.write('conflictsmsg', '%s\n', conmsg, label=label)
657 657 if helpfulmsg:
658 658 helpmsg = helpfulmsg()
659 659 fm.write('helpmsg', '%s\n', helpmsg, label=label)
660 660
661 661 def findpossible(cmd, table, strict=False):
662 662 """
663 663 Return cmd -> (aliases, command table entry)
664 664 for each matching command.
665 665 Return debug commands (or their aliases) only if no normal command matches.
666 666 """
667 667 choice = {}
668 668 debugchoice = {}
669 669
670 670 if cmd in table:
671 671 # short-circuit exact matches, "log" alias beats "^log|history"
672 672 keys = [cmd]
673 673 else:
674 674 keys = table.keys()
675 675
676 676 allcmds = []
677 677 for e in keys:
678 678 aliases = parsealiases(e)
679 679 allcmds.extend(aliases)
680 680 found = None
681 681 if cmd in aliases:
682 682 found = cmd
683 683 elif not strict:
684 684 for a in aliases:
685 685 if a.startswith(cmd):
686 686 found = a
687 687 break
688 688 if found is not None:
689 689 if aliases[0].startswith("debug") or found.startswith("debug"):
690 690 debugchoice[found] = (aliases, table[e])
691 691 else:
692 692 choice[found] = (aliases, table[e])
693 693
694 694 if not choice and debugchoice:
695 695 choice = debugchoice
696 696
697 697 return choice, allcmds
698 698
699 699 def findcmd(cmd, table, strict=True):
700 700 """Return (aliases, command table entry) for command string."""
701 701 choice, allcmds = findpossible(cmd, table, strict)
702 702
703 703 if cmd in choice:
704 704 return choice[cmd]
705 705
706 706 if len(choice) > 1:
707 707 clist = sorted(choice)
708 708 raise error.AmbiguousCommand(cmd, clist)
709 709
710 710 if choice:
711 711 return list(choice.values())[0]
712 712
713 713 raise error.UnknownCommand(cmd, allcmds)
714 714
715 715 def findrepo(p):
716 716 while not os.path.isdir(os.path.join(p, ".hg")):
717 717 oldp, p = p, os.path.dirname(p)
718 718 if p == oldp:
719 719 return None
720 720
721 721 return p
722 722
723 723 def bailifchanged(repo, merge=True, hint=None):
724 724     """ enforce the precondition that the working directory must be clean.
725 725
726 726 'merge' can be set to false if a pending uncommitted merge should be
727 727 ignored (such as when 'update --check' runs).
728 728
729 729     'hint' is the usual hint given to the Abort exception.
730 730 """
731 731
732 732 if merge and repo.dirstate.p2() != nullid:
733 733 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
734 734 modified, added, removed, deleted = repo.status()[:4]
735 735 if modified or added or removed or deleted:
736 736 raise error.Abort(_('uncommitted changes'), hint=hint)
737 737 ctx = repo[None]
738 738 for s in sorted(ctx.substate):
739 739 ctx.sub(s).bailifchanged(hint=hint)
740 740
741 741 def logmessage(ui, opts):
742 742     """ get the log message according to the -m and -l options """
743 743 message = opts.get('message')
744 744 logfile = opts.get('logfile')
745 745
746 746 if message and logfile:
747 747 raise error.Abort(_('options --message and --logfile are mutually '
748 748 'exclusive'))
749 749 if not message and logfile:
750 750 try:
751 751 if isstdiofilename(logfile):
752 752 message = ui.fin.read()
753 753 else:
754 754 message = '\n'.join(util.readfile(logfile).splitlines())
755 755 except IOError as inst:
756 756 raise error.Abort(_("can't read commit message '%s': %s") %
757 757 (logfile, encoding.strtolocal(inst.strerror)))
758 758 return message
759 759
760 760 def mergeeditform(ctxorbool, baseformname):
761 761 """return appropriate editform name (referencing a committemplate)
762 762
763 763 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
764 764     a merge is being committed.
765 765
766 766 This returns baseformname with '.merge' appended if it is a merge,
767 767 otherwise '.normal' is appended.
768 768 """
769 769 if isinstance(ctxorbool, bool):
770 770 if ctxorbool:
771 771 return baseformname + ".merge"
772 772 elif 1 < len(ctxorbool.parents()):
773 773 return baseformname + ".merge"
774 774
775 775 return baseformname + ".normal"
776 776
777 777 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
778 778 editform='', **opts):
779 779 """get appropriate commit message editor according to '--edit' option
780 780
781 781     'finishdesc' is a function to be called with the edited commit message
782 782     (= 'description' of the new changeset) just after editing, but
783 783     before checking emptiness. It should return the actual text to be
784 784     stored into history. This allows changing the description before
785 785     storing it.
786 786
787 787     'extramsg' is an extra message to be shown in the editor instead of
788 788     the 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
789 789     are automatically added.
790 790
791 791 'editform' is a dot-separated list of names, to distinguish
792 792 the purpose of commit text editing.
793 793
794 794 'getcommiteditor' returns 'commitforceeditor' regardless of
795 795 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
796 796     they are specific to usage in MQ.
797 797 """
798 798 if edit or finishdesc or extramsg:
799 799 return lambda r, c, s: commitforceeditor(r, c, s,
800 800 finishdesc=finishdesc,
801 801 extramsg=extramsg,
802 802 editform=editform)
803 803 elif editform:
804 804 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
805 805 else:
806 806 return commiteditor
807 807
808 808 def loglimit(opts):
809 809 """get the log limit according to option -l/--limit"""
810 810 limit = opts.get('limit')
811 811 if limit:
812 812 try:
813 813 limit = int(limit)
814 814 except ValueError:
815 815 raise error.Abort(_('limit must be a positive integer'))
816 816 if limit <= 0:
817 817 raise error.Abort(_('limit must be positive'))
818 818 else:
819 819 limit = None
820 820 return limit
821 821
822 822 def makefilename(repo, pat, node, desc=None,
823 823 total=None, seqno=None, revwidth=None, pathname=None):
824 824 node_expander = {
825 825 'H': lambda: hex(node),
826 826 'R': lambda: '%d' % repo.changelog.rev(node),
827 827 'h': lambda: short(node),
828 828 'm': lambda: re.sub('[^\w]', '_', desc or '')
829 829 }
830 830 expander = {
831 831 '%': lambda: '%',
832 832 'b': lambda: os.path.basename(repo.root),
833 833 }
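    # Descriptive note (not part of this change): the format specifiers
    # assembled here are %H (full hex node), %R (rev number), %h (short node),
    # %m (sanitized description), %% (literal '%') and %b (basename of the
    # repo root); %r, %N, %n, %s, %d and %p are added conditionally below when
    # node, total, seqno or pathname are supplied. The default export file
    # name template 'hg-%h.patch' (see export() further down) relies on %h.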
834 834
835 835 try:
836 836 if node:
837 837 expander.update(node_expander)
838 838 if node:
839 839 expander['r'] = (lambda:
840 840 ('%d' % repo.changelog.rev(node)).zfill(revwidth or 0))
841 841 if total is not None:
842 842 expander['N'] = lambda: '%d' % total
843 843 if seqno is not None:
844 844 expander['n'] = lambda: '%d' % seqno
845 845 if total is not None and seqno is not None:
846 846 expander['n'] = (lambda: ('%d' % seqno).zfill(len('%d' % total)))
847 847 if pathname is not None:
848 848 expander['s'] = lambda: os.path.basename(pathname)
849 849 expander['d'] = lambda: os.path.dirname(pathname) or '.'
850 850 expander['p'] = lambda: pathname
851 851
852 852 newname = []
853 853 patlen = len(pat)
854 854 i = 0
855 855 while i < patlen:
856 856 c = pat[i:i + 1]
857 857 if c == '%':
858 858 i += 1
859 859 c = pat[i:i + 1]
860 860 c = expander[c]()
861 861 newname.append(c)
862 862 i += 1
863 863 return ''.join(newname)
864 864 except KeyError as inst:
865 865 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
866 866 inst.args[0])
867 867
868 868 def isstdiofilename(pat):
869 869 """True if the given pat looks like a filename denoting stdin/stdout"""
870 870 return not pat or pat == '-'
871 871
872 872 class _unclosablefile(object):
873 873 def __init__(self, fp):
874 874 self._fp = fp
875 875
876 876 def close(self):
877 877 pass
878 878
879 879 def __iter__(self):
880 880 return iter(self._fp)
881 881
882 882 def __getattr__(self, attr):
883 883 return getattr(self._fp, attr)
884 884
885 885 def __enter__(self):
886 886 return self
887 887
888 888 def __exit__(self, exc_type, exc_value, exc_tb):
889 889 pass
890 890
891 891 def makefileobj(repo, pat, node=None, desc=None, total=None,
892 892 seqno=None, revwidth=None, mode='wb', modemap=None,
893 893 pathname=None):
894 894
895 895 writable = mode not in ('r', 'rb')
896 896
897 897 if isstdiofilename(pat):
898 898 if writable:
899 899 fp = repo.ui.fout
900 900 else:
901 901 fp = repo.ui.fin
902 902 return _unclosablefile(fp)
903 903 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
904 904 if modemap is not None:
905 905 mode = modemap.get(fn, mode)
906 906 if mode == 'wb':
907 907 modemap[fn] = 'ab'
908 908 return open(fn, mode)
909 909
910 910 def openrevlog(repo, cmd, file_, opts):
911 911 """opens the changelog, manifest, a filelog or a given revlog"""
912 912 cl = opts['changelog']
913 913 mf = opts['manifest']
914 914 dir = opts['dir']
915 915 msg = None
916 916 if cl and mf:
917 917 msg = _('cannot specify --changelog and --manifest at the same time')
918 918 elif cl and dir:
919 919 msg = _('cannot specify --changelog and --dir at the same time')
920 920 elif cl or mf or dir:
921 921 if file_:
922 922 msg = _('cannot specify filename with --changelog or --manifest')
923 923 elif not repo:
924 924 msg = _('cannot specify --changelog or --manifest or --dir '
925 925 'without a repository')
926 926 if msg:
927 927 raise error.Abort(msg)
928 928
929 929 r = None
930 930 if repo:
931 931 if cl:
932 932 r = repo.unfiltered().changelog
933 933 elif dir:
934 934 if 'treemanifest' not in repo.requirements:
935 935 raise error.Abort(_("--dir can only be used on repos with "
936 936 "treemanifest enabled"))
937 937 dirlog = repo.manifestlog._revlog.dirlog(dir)
938 938 if len(dirlog):
939 939 r = dirlog
940 940 elif mf:
941 941 r = repo.manifestlog._revlog
942 942 elif file_:
943 943 filelog = repo.file(file_)
944 944 if len(filelog):
945 945 r = filelog
946 946 if not r:
947 947 if not file_:
948 948 raise error.CommandError(cmd, _('invalid arguments'))
949 949 if not os.path.isfile(file_):
950 950 raise error.Abort(_("revlog '%s' not found") % file_)
951 951 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
952 952 file_[:-2] + ".i")
953 953 return r
954 954
955 955 def copy(ui, repo, pats, opts, rename=False):
956 956 # called with the repo lock held
957 957 #
958 958 # hgsep => pathname that uses "/" to separate directories
959 959 # ossep => pathname that uses os.sep to separate directories
960 960 cwd = repo.getcwd()
961 961 targets = {}
962 962 after = opts.get("after")
963 963 dryrun = opts.get("dry_run")
964 964 wctx = repo[None]
965 965
966 966 def walkpat(pat):
967 967 srcs = []
968 968 if after:
969 969 badstates = '?'
970 970 else:
971 971 badstates = '?r'
972 972 m = scmutil.match(wctx, [pat], opts, globbed=True)
973 973 for abs in wctx.walk(m):
974 974 state = repo.dirstate[abs]
975 975 rel = m.rel(abs)
976 976 exact = m.exact(abs)
977 977 if state in badstates:
978 978 if exact and state == '?':
979 979 ui.warn(_('%s: not copying - file is not managed\n') % rel)
980 980 if exact and state == 'r':
981 981 ui.warn(_('%s: not copying - file has been marked for'
982 982 ' remove\n') % rel)
983 983 continue
984 984 # abs: hgsep
985 985 # rel: ossep
986 986 srcs.append((abs, rel, exact))
987 987 return srcs
988 988
989 989 # abssrc: hgsep
990 990 # relsrc: ossep
991 991 # otarget: ossep
992 992 def copyfile(abssrc, relsrc, otarget, exact):
993 993 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
994 994 if '/' in abstarget:
995 995             # We cannot normalize abstarget itself, as this would prevent
996 996 # case only renames, like a => A.
997 997 abspath, absname = abstarget.rsplit('/', 1)
998 998 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
999 999 reltarget = repo.pathto(abstarget, cwd)
1000 1000 target = repo.wjoin(abstarget)
1001 1001 src = repo.wjoin(abssrc)
1002 1002 state = repo.dirstate[abstarget]
1003 1003
1004 1004 scmutil.checkportable(ui, abstarget)
1005 1005
1006 1006 # check for collisions
1007 1007 prevsrc = targets.get(abstarget)
1008 1008 if prevsrc is not None:
1009 1009 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1010 1010 (reltarget, repo.pathto(abssrc, cwd),
1011 1011 repo.pathto(prevsrc, cwd)))
1012 1012 return
1013 1013
1014 1014 # check for overwrites
1015 1015 exists = os.path.lexists(target)
1016 1016 samefile = False
1017 1017 if exists and abssrc != abstarget:
1018 1018 if (repo.dirstate.normalize(abssrc) ==
1019 1019 repo.dirstate.normalize(abstarget)):
1020 1020 if not rename:
1021 1021 ui.warn(_("%s: can't copy - same file\n") % reltarget)
1022 1022 return
1023 1023 exists = False
1024 1024 samefile = True
1025 1025
1026 1026 if not after and exists or after and state in 'mn':
1027 1027 if not opts['force']:
1028 1028 if state in 'mn':
1029 1029 msg = _('%s: not overwriting - file already committed\n')
1030 1030 if after:
1031 1031 flags = '--after --force'
1032 1032 else:
1033 1033 flags = '--force'
1034 1034 if rename:
1035 1035 hint = _('(hg rename %s to replace the file by '
1036 1036 'recording a rename)\n') % flags
1037 1037 else:
1038 1038 hint = _('(hg copy %s to replace the file by '
1039 1039 'recording a copy)\n') % flags
1040 1040 else:
1041 1041 msg = _('%s: not overwriting - file exists\n')
1042 1042 if rename:
1043 1043 hint = _('(hg rename --after to record the rename)\n')
1044 1044 else:
1045 1045 hint = _('(hg copy --after to record the copy)\n')
1046 1046 ui.warn(msg % reltarget)
1047 1047 ui.warn(hint)
1048 1048 return
1049 1049
1050 1050 if after:
1051 1051 if not exists:
1052 1052 if rename:
1053 1053 ui.warn(_('%s: not recording move - %s does not exist\n') %
1054 1054 (relsrc, reltarget))
1055 1055 else:
1056 1056 ui.warn(_('%s: not recording copy - %s does not exist\n') %
1057 1057 (relsrc, reltarget))
1058 1058 return
1059 1059 elif not dryrun:
1060 1060 try:
1061 1061 if exists:
1062 1062 os.unlink(target)
1063 1063 targetdir = os.path.dirname(target) or '.'
1064 1064 if not os.path.isdir(targetdir):
1065 1065 os.makedirs(targetdir)
1066 1066 if samefile:
1067 1067 tmp = target + "~hgrename"
1068 1068 os.rename(src, tmp)
1069 1069 os.rename(tmp, target)
1070 1070 else:
1071 1071 util.copyfile(src, target)
1072 1072 srcexists = True
1073 1073 except IOError as inst:
1074 1074 if inst.errno == errno.ENOENT:
1075 1075 ui.warn(_('%s: deleted in working directory\n') % relsrc)
1076 1076 srcexists = False
1077 1077 else:
1078 1078 ui.warn(_('%s: cannot copy - %s\n') %
1079 1079 (relsrc, encoding.strtolocal(inst.strerror)))
1080 1080 return True # report a failure
1081 1081
1082 1082 if ui.verbose or not exact:
1083 1083 if rename:
1084 1084 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1085 1085 else:
1086 1086 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1087 1087
1088 1088 targets[abstarget] = abssrc
1089 1089
1090 1090 # fix up dirstate
1091 1091 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1092 1092 dryrun=dryrun, cwd=cwd)
1093 1093 if rename and not dryrun:
1094 1094 if not after and srcexists and not samefile:
1095 1095 repo.wvfs.unlinkpath(abssrc)
1096 1096 wctx.forget([abssrc])
1097 1097
1098 1098 # pat: ossep
1099 1099 # dest ossep
1100 1100 # srcs: list of (hgsep, hgsep, ossep, bool)
1101 1101 # return: function that takes hgsep and returns ossep
1102 1102 def targetpathfn(pat, dest, srcs):
1103 1103 if os.path.isdir(pat):
1104 1104 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1105 1105 abspfx = util.localpath(abspfx)
1106 1106 if destdirexists:
1107 1107 striplen = len(os.path.split(abspfx)[0])
1108 1108 else:
1109 1109 striplen = len(abspfx)
1110 1110 if striplen:
1111 1111 striplen += len(pycompat.ossep)
1112 1112 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1113 1113 elif destdirexists:
1114 1114 res = lambda p: os.path.join(dest,
1115 1115 os.path.basename(util.localpath(p)))
1116 1116 else:
1117 1117 res = lambda p: dest
1118 1118 return res
1119 1119
1120 1120 # pat: ossep
1121 1121 # dest ossep
1122 1122 # srcs: list of (hgsep, hgsep, ossep, bool)
1123 1123 # return: function that takes hgsep and returns ossep
1124 1124 def targetpathafterfn(pat, dest, srcs):
1125 1125 if matchmod.patkind(pat):
1126 1126 # a mercurial pattern
1127 1127 res = lambda p: os.path.join(dest,
1128 1128 os.path.basename(util.localpath(p)))
1129 1129 else:
1130 1130 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1131 1131 if len(abspfx) < len(srcs[0][0]):
1132 1132 # A directory. Either the target path contains the last
1133 1133 # component of the source path or it does not.
1134 1134 def evalpath(striplen):
1135 1135 score = 0
1136 1136 for s in srcs:
1137 1137 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1138 1138 if os.path.lexists(t):
1139 1139 score += 1
1140 1140 return score
1141 1141
1142 1142 abspfx = util.localpath(abspfx)
1143 1143 striplen = len(abspfx)
1144 1144 if striplen:
1145 1145 striplen += len(pycompat.ossep)
1146 1146 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1147 1147 score = evalpath(striplen)
1148 1148 striplen1 = len(os.path.split(abspfx)[0])
1149 1149 if striplen1:
1150 1150 striplen1 += len(pycompat.ossep)
1151 1151 if evalpath(striplen1) > score:
1152 1152 striplen = striplen1
1153 1153 res = lambda p: os.path.join(dest,
1154 1154 util.localpath(p)[striplen:])
1155 1155 else:
1156 1156 # a file
1157 1157 if destdirexists:
1158 1158 res = lambda p: os.path.join(dest,
1159 1159 os.path.basename(util.localpath(p)))
1160 1160 else:
1161 1161 res = lambda p: dest
1162 1162 return res
1163 1163
1164 1164 pats = scmutil.expandpats(pats)
1165 1165 if not pats:
1166 1166 raise error.Abort(_('no source or destination specified'))
1167 1167 if len(pats) == 1:
1168 1168 raise error.Abort(_('no destination specified'))
1169 1169 dest = pats.pop()
1170 1170 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1171 1171 if not destdirexists:
1172 1172 if len(pats) > 1 or matchmod.patkind(pats[0]):
1173 1173 raise error.Abort(_('with multiple sources, destination must be an '
1174 1174 'existing directory'))
1175 1175 if util.endswithsep(dest):
1176 1176 raise error.Abort(_('destination %s is not a directory') % dest)
1177 1177
1178 1178 tfn = targetpathfn
1179 1179 if after:
1180 1180 tfn = targetpathafterfn
1181 1181 copylist = []
1182 1182 for pat in pats:
1183 1183 srcs = walkpat(pat)
1184 1184 if not srcs:
1185 1185 continue
1186 1186 copylist.append((tfn(pat, dest, srcs), srcs))
1187 1187 if not copylist:
1188 1188 raise error.Abort(_('no files to copy'))
1189 1189
1190 1190 errors = 0
1191 1191 for targetpath, srcs in copylist:
1192 1192 for abssrc, relsrc, exact in srcs:
1193 1193 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1194 1194 errors += 1
1195 1195
1196 1196 if errors:
1197 1197 ui.warn(_('(consider using --after)\n'))
1198 1198
1199 1199 return errors != 0
1200 1200
1201 1201 ## facility to let extensions process additional data into an import patch
1202 1202 # list of identifiers to be executed in order
1203 1203 extrapreimport = [] # run before commit
1204 1204 extrapostimport = [] # run after commit
1205 1205 # mapping from identifier to actual import function
1206 1206 #
1207 1207 # 'preimport' are run before the commit is made and are provided the following
1208 1208 # arguments:
1209 1209 # - repo: the localrepository instance,
1210 1210 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1211 1211 # - extra: the future extra dictionary of the changeset, please mutate it,
1212 1212 # - opts: the import options.
1213 1213 # XXX ideally, we would just pass a ctx ready to be computed, that would allow
1214 1214 # mutation of in memory commit and more. Feel free to rework the code to get
1215 1215 # there.
1216 1216 extrapreimportmap = {}
1217 1217 # 'postimport' are run after the commit is made and are provided the following
1218 1218 # argument:
1219 1219 # - ctx: the changectx created by import.
1220 1220 extrapostimportmap = {}
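# Hedged sketch (illustrative only, names are hypothetical): an extension
# wanting to stash extra data into imported changesets would register itself
# along the lines of
#
#   def myextpreimport(repo, patchdata, extra, opts):
#       extra['myext_origin'] = patchdata.get('nodeid', '')
#   cmdutil.extrapreimport.append('myext')
#   cmdutil.extrapreimportmap['myext'] = myextpreimport
#
# tryimportone() below then calls the registered hooks right before the
# commit is created.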
1221 1221
1222 1222 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
1223 1223 """Utility function used by commands.import to import a single patch
1224 1224
1225 1225 This function is explicitly defined here to help the evolve extension to
1226 1226 wrap this part of the import logic.
1227 1227
1228 1228     The API is currently a bit ugly because it is a simple code translation from
1229 1229 the import command. Feel free to make it better.
1230 1230
1231 1231 :hunk: a patch (as a binary string)
1232 1232 :parents: nodes that will be parent of the created commit
1233 1233     :opts: the full dict of options passed to the import command
1234 1234 :msgs: list to save commit message to.
1235 1235 (used in case we need to save it when failing)
1236 1236     :updatefunc: a function that updates a repo to a given node
1237 1237 updatefunc(<repo>, <node>)
1238 1238 """
1239 1239 # avoid cycle context -> subrepo -> cmdutil
1240 1240 from . import context
1241 1241 extractdata = patch.extract(ui, hunk)
1242 1242 tmpname = extractdata.get('filename')
1243 1243 message = extractdata.get('message')
1244 1244 user = opts.get('user') or extractdata.get('user')
1245 1245 date = opts.get('date') or extractdata.get('date')
1246 1246 branch = extractdata.get('branch')
1247 1247 nodeid = extractdata.get('nodeid')
1248 1248 p1 = extractdata.get('p1')
1249 1249 p2 = extractdata.get('p2')
1250 1250
1251 1251 nocommit = opts.get('no_commit')
1252 1252 importbranch = opts.get('import_branch')
1253 1253 update = not opts.get('bypass')
1254 1254 strip = opts["strip"]
1255 1255 prefix = opts["prefix"]
1256 1256 sim = float(opts.get('similarity') or 0)
1257 1257 if not tmpname:
1258 1258 return (None, None, False)
1259 1259
1260 1260 rejects = False
1261 1261
1262 1262 try:
1263 1263 cmdline_message = logmessage(ui, opts)
1264 1264 if cmdline_message:
1265 1265             # pick up the cmdline msg
1266 1266 message = cmdline_message
1267 1267 elif message:
1268 1268             # pick up the patch msg
1269 1269 message = message.strip()
1270 1270 else:
1271 1271 # launch the editor
1272 1272 message = None
1273 1273 ui.debug('message:\n%s\n' % message)
1274 1274
1275 1275 if len(parents) == 1:
1276 1276 parents.append(repo[nullid])
1277 1277 if opts.get('exact'):
1278 1278 if not nodeid or not p1:
1279 1279 raise error.Abort(_('not a Mercurial patch'))
1280 1280 p1 = repo[p1]
1281 1281 p2 = repo[p2 or nullid]
1282 1282 elif p2:
1283 1283 try:
1284 1284 p1 = repo[p1]
1285 1285 p2 = repo[p2]
1286 1286 # Without any options, consider p2 only if the
1287 1287 # patch is being applied on top of the recorded
1288 1288 # first parent.
1289 1289 if p1 != parents[0]:
1290 1290 p1 = parents[0]
1291 1291 p2 = repo[nullid]
1292 1292 except error.RepoError:
1293 1293 p1, p2 = parents
1294 1294 if p2.node() == nullid:
1295 1295 ui.warn(_("warning: import the patch as a normal revision\n"
1296 1296 "(use --exact to import the patch as a merge)\n"))
1297 1297 else:
1298 1298 p1, p2 = parents
1299 1299
1300 1300 n = None
1301 1301 if update:
1302 1302 if p1 != parents[0]:
1303 1303 updatefunc(repo, p1.node())
1304 1304 if p2 != parents[1]:
1305 1305 repo.setparents(p1.node(), p2.node())
1306 1306
1307 1307 if opts.get('exact') or importbranch:
1308 1308 repo.dirstate.setbranch(branch or 'default')
1309 1309
1310 1310 partial = opts.get('partial', False)
1311 1311 files = set()
1312 1312 try:
1313 1313 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1314 1314 files=files, eolmode=None, similarity=sim / 100.0)
1315 1315 except error.PatchError as e:
1316 1316 if not partial:
1317 1317 raise error.Abort(str(e))
1318 1318 if partial:
1319 1319 rejects = True
1320 1320
1321 1321 files = list(files)
1322 1322 if nocommit:
1323 1323 if message:
1324 1324 msgs.append(message)
1325 1325 else:
1326 1326 if opts.get('exact') or p2:
1327 1327                 # If you got here, you either used --force and know what
1328 1328 # you are doing or used --exact or a merge patch while
1329 1329 # being updated to its first parent.
1330 1330 m = None
1331 1331 else:
1332 1332 m = scmutil.matchfiles(repo, files or [])
1333 1333 editform = mergeeditform(repo[None], 'import.normal')
1334 1334 if opts.get('exact'):
1335 1335 editor = None
1336 1336 else:
1337 1337 editor = getcommiteditor(editform=editform, **opts)
1338 1338 extra = {}
1339 1339 for idfunc in extrapreimport:
1340 1340 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1341 1341 overrides = {}
1342 1342 if partial:
1343 1343 overrides[('ui', 'allowemptycommit')] = True
1344 1344 with repo.ui.configoverride(overrides, 'import'):
1345 1345 n = repo.commit(message, user,
1346 1346 date, match=m,
1347 1347 editor=editor, extra=extra)
1348 1348 for idfunc in extrapostimport:
1349 1349 extrapostimportmap[idfunc](repo[n])
1350 1350 else:
1351 1351 if opts.get('exact') or importbranch:
1352 1352 branch = branch or 'default'
1353 1353 else:
1354 1354 branch = p1.branch()
1355 1355 store = patch.filestore()
1356 1356 try:
1357 1357 files = set()
1358 1358 try:
1359 1359 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1360 1360 files, eolmode=None)
1361 1361 except error.PatchError as e:
1362 1362 raise error.Abort(str(e))
1363 1363 if opts.get('exact'):
1364 1364 editor = None
1365 1365 else:
1366 1366 editor = getcommiteditor(editform='import.bypass')
1367 1367 memctx = context.memctx(repo, (p1.node(), p2.node()),
1368 1368 message,
1369 1369 files=files,
1370 1370 filectxfn=store,
1371 1371 user=user,
1372 1372 date=date,
1373 1373 branch=branch,
1374 1374 editor=editor)
1375 1375 n = memctx.commit()
1376 1376 finally:
1377 1377 store.close()
1378 1378 if opts.get('exact') and nocommit:
1379 1379         # --exact with --no-commit is still useful in that it does the merge
1380 1380 # and branch bits
1381 1381 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1382 1382 elif opts.get('exact') and hex(n) != nodeid:
1383 1383 raise error.Abort(_('patch is damaged or loses information'))
1384 1384 msg = _('applied to working directory')
1385 1385 if n:
1386 1386 # i18n: refers to a short changeset id
1387 1387 msg = _('created %s') % short(n)
1388 1388 return (msg, n, rejects)
1389 1389 finally:
1390 1390 os.unlink(tmpname)
1391 1391
1392 1392 # facility to let extensions include additional data in an exported patch
1393 1393 # list of identifiers to be executed in order
1394 1394 extraexport = []
1395 1395 # mapping from identifier to actual export function
1396 1396 # function has to return a string to be added to the header or None
1397 1397 # it is given two arguments (sequencenumber, changectx)
1398 1398 extraexportmap = {}
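# Hedged sketch (illustrative only, names are hypothetical): an extension
# adding its own header line to exported patches would do roughly
#
#   def myextheader(seqno, ctx):
#       return 'MyExt-Note patch %d of a series' % seqno
#   cmdutil.extraexport.append('myext')
#   cmdutil.extraexportmap['myext'] = myextheader
#
# _exportsingle() below emits the returned string as '# <string>' in the
# patch header, and skips the header when the function returns None.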
1399 1399
1400 1400 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1401 1401 node = scmutil.binnode(ctx)
1402 1402 parents = [p.node() for p in ctx.parents() if p]
1403 1403 branch = ctx.branch()
1404 1404 if switch_parent:
1405 1405 parents.reverse()
1406 1406
1407 1407 if parents:
1408 1408 prev = parents[0]
1409 1409 else:
1410 1410 prev = nullid
1411 1411
1412 1412 write("# HG changeset patch\n")
1413 1413 write("# User %s\n" % ctx.user())
1414 1414 write("# Date %d %d\n" % ctx.date())
1415 1415 write("# %s\n" % util.datestr(ctx.date()))
1416 1416 if branch and branch != 'default':
1417 1417 write("# Branch %s\n" % branch)
1418 1418 write("# Node ID %s\n" % hex(node))
1419 1419 write("# Parent %s\n" % hex(prev))
1420 1420 if len(parents) > 1:
1421 1421 write("# Parent %s\n" % hex(parents[1]))
1422 1422
1423 1423 for headerid in extraexport:
1424 1424 header = extraexportmap[headerid](seqno, ctx)
1425 1425 if header is not None:
1426 1426 write('# %s\n' % header)
1427 1427 write(ctx.description().rstrip())
1428 1428 write("\n\n")
1429 1429
1430 1430 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1431 1431 write(chunk, label=label)
1432 1432
1433 1433 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1434 1434 opts=None, match=None):
1435 1435 '''export changesets as hg patches
1436 1436
1437 1437 Args:
1438 1438 repo: The repository from which we're exporting revisions.
1439 1439 revs: A list of revisions to export as revision numbers.
1440 1440 fntemplate: An optional string to use for generating patch file names.
1441 1441 fp: An optional file-like object to which patches should be written.
1442 1442 switch_parent: If True, show diffs against second parent when not nullid.
1443 1443 Default is false, which always shows diff against p1.
1444 1444 opts: diff options to use for generating the patch.
1445 1445 match: If specified, only export changes to files matching this matcher.
1446 1446
1447 1447 Returns:
1448 1448 Nothing.
1449 1449
1450 1450 Side Effect:
1451 1451 "HG Changeset Patch" data is emitted to one of the following
1452 1452 destinations:
1453 1453 fp is specified: All revs are written to the specified
1454 1454 file-like object.
1455 1455 fntemplate specified: Each rev is written to a unique file named using
1456 1456 the given template.
1457 1457 Neither fp nor template specified: All revs written to repo.ui.write()
1458 1458 '''
1459 1459
1460 1460 total = len(revs)
1461 1461 revwidth = max(len(str(rev)) for rev in revs)
1462 1462 filemode = {}
1463 1463
1464 1464 write = None
1465 1465 dest = '<unnamed>'
1466 1466 if fp:
1467 1467 dest = getattr(fp, 'name', dest)
1468 1468 def write(s, **kw):
1469 1469 fp.write(s)
1470 1470 elif not fntemplate:
1471 1471 write = repo.ui.write
1472 1472
1473 1473 for seqno, rev in enumerate(revs, 1):
1474 1474 ctx = repo[rev]
1475 1475 fo = None
1476 1476 if not fp and fntemplate:
1477 1477 desc_lines = ctx.description().rstrip().split('\n')
1478 1478 desc = desc_lines[0] #Commit always has a first line.
1479 1479 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1480 1480 total=total, seqno=seqno, revwidth=revwidth,
1481 1481 mode='wb', modemap=filemode)
1482 1482 dest = fo.name
1483 1483 def write(s, **kw):
1484 1484 fo.write(s)
1485 1485 if not dest.startswith('<'):
1486 1486 repo.ui.note("%s\n" % dest)
1487 1487 _exportsingle(
1488 1488 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1489 1489 if fo is not None:
1490 1490 fo.close()
1491 1491
1492 1492 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1493 1493 changes=None, stat=False, fp=None, prefix='',
1494 1494 root='', listsubrepos=False, hunksfilterfn=None):
1495 1495 '''show diff or diffstat.'''
1496 1496 if fp is None:
1497 1497 write = ui.write
1498 1498 else:
1499 1499 def write(s, **kw):
1500 1500 fp.write(s)
1501 1501
1502 1502 if root:
1503 1503 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1504 1504 else:
1505 1505 relroot = ''
1506 1506 if relroot != '':
1507 1507 # XXX relative roots currently don't work if the root is within a
1508 1508 # subrepo
1509 1509 uirelroot = match.uipath(relroot)
1510 1510 relroot += '/'
1511 1511 for matchroot in match.files():
1512 1512 if not matchroot.startswith(relroot):
1513 1513 ui.warn(_('warning: %s not inside relative root %s\n') % (
1514 1514 match.uipath(matchroot), uirelroot))
1515 1515
1516 1516 if stat:
1517 1517 diffopts = diffopts.copy(context=0)
1518 1518 width = 80
1519 1519 if not ui.plain():
1520 1520 width = ui.termwidth()
1521 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1521 chunks = patch.diff(repo, node1, node2, match, changes, opts=diffopts,
1522 1522 prefix=prefix, relroot=relroot,
1523 1523 hunksfilterfn=hunksfilterfn)
1524 1524 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1525 1525 width=width):
1526 1526 write(chunk, label=label)
1527 1527 else:
1528 1528 for chunk, label in patch.diffui(repo, node1, node2, match,
1529 changes, diffopts, prefix=prefix,
1529 changes, opts=diffopts, prefix=prefix,
1530 1530 relroot=relroot,
1531 1531 hunksfilterfn=hunksfilterfn):
1532 1532 write(chunk, label=label)
1533 1533
1534 1534 if listsubrepos:
1535 1535 ctx1 = repo[node1]
1536 1536 ctx2 = repo[node2]
1537 1537 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1538 1538 tempnode2 = node2
1539 1539 try:
1540 1540 if node2 is not None:
1541 1541 tempnode2 = ctx2.substate[subpath][1]
1542 1542 except KeyError:
1543 1543 # A subrepo that existed in node1 was deleted between node1 and
1544 1544 # node2 (inclusive). Thus, ctx2's substate won't contain that
1545 1545 # subpath. The best we can do is to ignore it.
1546 1546 tempnode2 = None
1547 1547 submatch = matchmod.subdirmatcher(subpath, match)
1548 1548 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1549 1549 stat=stat, fp=fp, prefix=prefix)
1550 1550
1551 1551 def _changesetlabels(ctx):
1552 1552 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1553 1553 if ctx.obsolete():
1554 1554 labels.append('changeset.obsolete')
1555 1555 if ctx.isunstable():
1556 1556 labels.append('changeset.unstable')
1557 1557 for instability in ctx.instabilities():
1558 1558 labels.append('instability.%s' % instability)
1559 1559 return ' '.join(labels)
1560 1560
1561 1561 class changeset_printer(object):
1562 1562 '''show changeset information when templating not requested.'''
1563 1563
1564 1564 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1565 1565 self.ui = ui
1566 1566 self.repo = repo
1567 1567 self.buffered = buffered
1568 1568 self.matchfn = matchfn
1569 1569 self.diffopts = diffopts
1570 1570 self.header = {}
1571 1571 self.hunk = {}
1572 1572 self.lastheader = None
1573 1573 self.footer = None
1574 1574 self._columns = templatekw.getlogcolumns()
1575 1575
1576 1576 def flush(self, ctx):
1577 1577 rev = ctx.rev()
1578 1578 if rev in self.header:
1579 1579 h = self.header[rev]
1580 1580 if h != self.lastheader:
1581 1581 self.lastheader = h
1582 1582 self.ui.write(h)
1583 1583 del self.header[rev]
1584 1584 if rev in self.hunk:
1585 1585 self.ui.write(self.hunk[rev])
1586 1586 del self.hunk[rev]
1587 1587 return 1
1588 1588 return 0
1589 1589
1590 1590 def close(self):
1591 1591 if self.footer:
1592 1592 self.ui.write(self.footer)
1593 1593
1594 1594 def show(self, ctx, copies=None, matchfn=None, hunksfilterfn=None,
1595 1595 **props):
1596 1596 props = pycompat.byteskwargs(props)
1597 1597 if self.buffered:
1598 1598 self.ui.pushbuffer(labeled=True)
1599 1599 self._show(ctx, copies, matchfn, hunksfilterfn, props)
1600 1600 self.hunk[ctx.rev()] = self.ui.popbuffer()
1601 1601 else:
1602 1602 self._show(ctx, copies, matchfn, hunksfilterfn, props)
1603 1603
1604 1604 def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
1605 1605 '''show a single changeset or file revision'''
1606 1606 changenode = ctx.node()
1607 1607 rev = ctx.rev()
1608 1608
1609 1609 if self.ui.quiet:
1610 1610 self.ui.write("%s\n" % scmutil.formatchangeid(ctx),
1611 1611 label='log.node')
1612 1612 return
1613 1613
1614 1614 columns = self._columns
1615 1615 self.ui.write(columns['changeset'] % scmutil.formatchangeid(ctx),
1616 1616 label=_changesetlabels(ctx))
1617 1617
1618 1618 # branches are shown first before any other names due to backwards
1619 1619 # compatibility
1620 1620 branch = ctx.branch()
1621 1621 # don't show the default branch name
1622 1622 if branch != 'default':
1623 1623 self.ui.write(columns['branch'] % branch, label='log.branch')
1624 1624
1625 1625 for nsname, ns in self.repo.names.iteritems():
1626 1626 # branches has special logic already handled above, so here we just
1627 1627 # skip it
1628 1628 if nsname == 'branches':
1629 1629 continue
1630 1630 # we will use the templatename as the color name since those two
1631 1631 # should be the same
1632 1632 for name in ns.names(self.repo, changenode):
1633 1633 self.ui.write(ns.logfmt % name,
1634 1634 label='log.%s' % ns.colorname)
1635 1635 if self.ui.debugflag:
1636 1636 self.ui.write(columns['phase'] % ctx.phasestr(), label='log.phase')
1637 1637 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1638 1638 label = 'log.parent changeset.%s' % pctx.phasestr()
1639 1639 self.ui.write(columns['parent'] % scmutil.formatchangeid(pctx),
1640 1640 label=label)
1641 1641
1642 1642 if self.ui.debugflag and rev is not None:
1643 1643 mnode = ctx.manifestnode()
1644 1644 mrev = self.repo.manifestlog._revlog.rev(mnode)
1645 1645 self.ui.write(columns['manifest']
1646 1646 % scmutil.formatrevnode(self.ui, mrev, mnode),
1647 1647 label='ui.debug log.manifest')
1648 1648 self.ui.write(columns['user'] % ctx.user(), label='log.user')
1649 1649 self.ui.write(columns['date'] % util.datestr(ctx.date()),
1650 1650 label='log.date')
1651 1651
1652 1652 if ctx.isunstable():
1653 1653 instabilities = ctx.instabilities()
1654 1654 self.ui.write(columns['instability'] % ', '.join(instabilities),
1655 1655 label='log.instability')
1656 1656
1657 1657 elif ctx.obsolete():
1658 1658 self._showobsfate(ctx)
1659 1659
1660 1660 self._exthook(ctx)
1661 1661
1662 1662 if self.ui.debugflag:
1663 1663 files = ctx.p1().status(ctx)[:3]
1664 1664 for key, value in zip(['files', 'files+', 'files-'], files):
1665 1665 if value:
1666 1666 self.ui.write(columns[key] % " ".join(value),
1667 1667 label='ui.debug log.files')
1668 1668 elif ctx.files() and self.ui.verbose:
1669 1669 self.ui.write(columns['files'] % " ".join(ctx.files()),
1670 1670 label='ui.note log.files')
1671 1671 if copies and self.ui.verbose:
1672 1672 copies = ['%s (%s)' % c for c in copies]
1673 1673 self.ui.write(columns['copies'] % ' '.join(copies),
1674 1674 label='ui.note log.copies')
1675 1675
1676 1676 extra = ctx.extra()
1677 1677 if extra and self.ui.debugflag:
1678 1678 for key, value in sorted(extra.items()):
1679 1679 self.ui.write(columns['extra'] % (key, util.escapestr(value)),
1680 1680 label='ui.debug log.extra')
1681 1681
1682 1682 description = ctx.description().strip()
1683 1683 if description:
1684 1684 if self.ui.verbose:
1685 1685 self.ui.write(_("description:\n"),
1686 1686 label='ui.note log.description')
1687 1687 self.ui.write(description,
1688 1688 label='ui.note log.description')
1689 1689 self.ui.write("\n\n")
1690 1690 else:
1691 1691 self.ui.write(columns['summary'] % description.splitlines()[0],
1692 1692 label='log.summary')
1693 1693 self.ui.write("\n")
1694 1694
1695 1695 self.showpatch(ctx, matchfn, hunksfilterfn=hunksfilterfn)
1696 1696
1697 1697 def _showobsfate(self, ctx):
1698 1698 obsfate = templatekw.showobsfate(repo=self.repo, ctx=ctx, ui=self.ui)
1699 1699
1700 1700 if obsfate:
1701 1701 for obsfateline in obsfate:
1702 1702 self.ui.write(self._columns['obsolete'] % obsfateline,
1703 1703 label='log.obsfate')
1704 1704
1705 1705 def _exthook(self, ctx):
1706 1706 '''empty method used by extensions as a hook point
1707 1707 '''
1708 1708
1709 1709 def showpatch(self, ctx, matchfn, hunksfilterfn=None):
1710 1710 if not matchfn:
1711 1711 matchfn = self.matchfn
1712 1712 if matchfn:
1713 1713 stat = self.diffopts.get('stat')
1714 1714 diff = self.diffopts.get('patch')
1715 1715 diffopts = patch.diffallopts(self.ui, self.diffopts)
1716 1716 node = ctx.node()
1717 1717 prev = ctx.p1().node()
1718 1718 if stat:
1719 1719 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1720 1720 match=matchfn, stat=True,
1721 1721 hunksfilterfn=hunksfilterfn)
1722 1722 if diff:
1723 1723 if stat:
1724 1724 self.ui.write("\n")
1725 1725 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1726 1726 match=matchfn, stat=False,
1727 1727 hunksfilterfn=hunksfilterfn)
1728 1728 self.ui.write("\n")
1729 1729
1730 1730 class jsonchangeset(changeset_printer):
1731 1731 '''format changeset information as JSON.'''
1732 1732
1733 1733 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1734 1734 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1735 1735 self.cache = {}
1736 1736 self._first = True
1737 1737
1738 1738 def close(self):
1739 1739 if not self._first:
1740 1740 self.ui.write("\n]\n")
1741 1741 else:
1742 1742 self.ui.write("[]\n")
1743 1743
1744 1744 def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
1745 1745 '''show a single changeset or file revision'''
1746 1746 rev = ctx.rev()
1747 1747 if rev is None:
1748 1748 jrev = jnode = 'null'
1749 1749 else:
1750 1750 jrev = '%d' % rev
1751 1751 jnode = '"%s"' % hex(ctx.node())
1752 1752 j = encoding.jsonescape
1753 1753
1754 1754 if self._first:
1755 1755 self.ui.write("[\n {")
1756 1756 self._first = False
1757 1757 else:
1758 1758 self.ui.write(",\n {")
1759 1759
1760 1760 if self.ui.quiet:
1761 1761 self.ui.write(('\n "rev": %s') % jrev)
1762 1762 self.ui.write((',\n "node": %s') % jnode)
1763 1763 self.ui.write('\n }')
1764 1764 return
1765 1765
1766 1766 self.ui.write(('\n "rev": %s') % jrev)
1767 1767 self.ui.write((',\n "node": %s') % jnode)
1768 1768 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1769 1769 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1770 1770 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1771 1771 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1772 1772 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1773 1773
1774 1774 self.ui.write((',\n "bookmarks": [%s]') %
1775 1775 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1776 1776 self.ui.write((',\n "tags": [%s]') %
1777 1777 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1778 1778 self.ui.write((',\n "parents": [%s]') %
1779 1779 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1780 1780
1781 1781 if self.ui.debugflag:
1782 1782 if rev is None:
1783 1783 jmanifestnode = 'null'
1784 1784 else:
1785 1785 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1786 1786 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1787 1787
1788 1788 self.ui.write((',\n "extra": {%s}') %
1789 1789 ", ".join('"%s": "%s"' % (j(k), j(v))
1790 1790 for k, v in ctx.extra().items()))
1791 1791
1792 1792 files = ctx.p1().status(ctx)
1793 1793 self.ui.write((',\n "modified": [%s]') %
1794 1794 ", ".join('"%s"' % j(f) for f in files[0]))
1795 1795 self.ui.write((',\n "added": [%s]') %
1796 1796 ", ".join('"%s"' % j(f) for f in files[1]))
1797 1797 self.ui.write((',\n "removed": [%s]') %
1798 1798 ", ".join('"%s"' % j(f) for f in files[2]))
1799 1799
1800 1800 elif self.ui.verbose:
1801 1801 self.ui.write((',\n "files": [%s]') %
1802 1802 ", ".join('"%s"' % j(f) for f in ctx.files()))
1803 1803
1804 1804 if copies:
1805 1805 self.ui.write((',\n "copies": {%s}') %
1806 1806 ", ".join('"%s": "%s"' % (j(k), j(v))
1807 1807 for k, v in copies))
1808 1808
1809 1809 matchfn = self.matchfn
1810 1810 if matchfn:
1811 1811 stat = self.diffopts.get('stat')
1812 1812 diff = self.diffopts.get('patch')
1813 1813 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1814 1814 node, prev = ctx.node(), ctx.p1().node()
1815 1815 if stat:
1816 1816 self.ui.pushbuffer()
1817 1817 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1818 1818 match=matchfn, stat=True)
1819 1819 self.ui.write((',\n "diffstat": "%s"')
1820 1820 % j(self.ui.popbuffer()))
1821 1821 if diff:
1822 1822 self.ui.pushbuffer()
1823 1823 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1824 1824 match=matchfn, stat=False)
1825 1825 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1826 1826
1827 1827 self.ui.write("\n }")
1828 1828
1829 1829 class changeset_templater(changeset_printer):
1830 1830 '''format changeset information.
1831 1831
1832 1832 Note: there are a variety of convenience functions to build a
1833 1833 changeset_templater for common cases. See functions such as:
1834 1834 makelogtemplater, show_changeset, buildcommittemplate, or other
1835 1835 functions that use changeset_templater.
1836 1836 '''
1837 1837
1838 1838 # Arguments before "buffered" used to be positional. Avoid adding or
1839 1839 # removing arguments before "buffered" so as not to break callers.
1840 1840 def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
1841 1841 buffered=False):
1842 1842 diffopts = diffopts or {}
1843 1843
1844 1844 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1845 1845 self.t = formatter.loadtemplater(ui, tmplspec,
1846 1846 cache=templatekw.defaulttempl)
1847 1847 self._counter = itertools.count()
1848 1848 self.cache = {}
1849 1849
1850 1850 self._tref = tmplspec.ref
1851 1851 self._parts = {'header': '', 'footer': '',
1852 1852 tmplspec.ref: tmplspec.ref,
1853 1853 'docheader': '', 'docfooter': '',
1854 1854 'separator': ''}
1855 1855 if tmplspec.mapfile:
1856 1856 # find correct templates for current mode, for backward
1857 1857 # compatibility with 'log -v/-q/--debug' using a mapfile
1858 1858 tmplmodes = [
1859 1859 (True, ''),
1860 1860 (self.ui.verbose, '_verbose'),
1861 1861 (self.ui.quiet, '_quiet'),
1862 1862 (self.ui.debugflag, '_debug'),
1863 1863 ]
1864 1864 for mode, postfix in tmplmodes:
1865 1865 for t in self._parts:
1866 1866 cur = t + postfix
1867 1867 if mode and cur in self.t:
1868 1868 self._parts[t] = cur
1869 1869 else:
1870 1870 partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
1871 1871 m = formatter.templatepartsmap(tmplspec, self.t, partnames)
1872 1872 self._parts.update(m)
1873 1873
1874 1874 if self._parts['docheader']:
1875 1875 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1876 1876
1877 1877 def close(self):
1878 1878 if self._parts['docfooter']:
1879 1879 if not self.footer:
1880 1880 self.footer = ""
1881 1881 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1882 1882 return super(changeset_templater, self).close()
1883 1883
1884 1884 def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
1885 1885 '''show a single changeset or file revision'''
1886 1886 props = props.copy()
1887 1887 props.update(templatekw.keywords)
1888 1888 props['templ'] = self.t
1889 1889 props['ctx'] = ctx
1890 1890 props['repo'] = self.repo
1891 1891 props['ui'] = self.repo.ui
1892 1892 props['index'] = index = next(self._counter)
1893 1893 props['revcache'] = {'copies': copies}
1894 1894 props['cache'] = self.cache
1895 1895 props = pycompat.strkwargs(props)
1896 1896
1897 1897 # write separator, which wouldn't work well with the header part below
1898 1898 # since there's inherently a conflict between header (across items) and
1899 1899 # separator (per item)
1900 1900 if self._parts['separator'] and index > 0:
1901 1901 self.ui.write(templater.stringify(self.t(self._parts['separator'])))
1902 1902
1903 1903 # write header
1904 1904 if self._parts['header']:
1905 1905 h = templater.stringify(self.t(self._parts['header'], **props))
1906 1906 if self.buffered:
1907 1907 self.header[ctx.rev()] = h
1908 1908 else:
1909 1909 if self.lastheader != h:
1910 1910 self.lastheader = h
1911 1911 self.ui.write(h)
1912 1912
1913 1913 # write changeset metadata, then patch if requested
1914 1914 key = self._parts[self._tref]
1915 1915 self.ui.write(templater.stringify(self.t(key, **props)))
1916 1916 self.showpatch(ctx, matchfn, hunksfilterfn=hunksfilterfn)
1917 1917
1918 1918 if self._parts['footer']:
1919 1919 if not self.footer:
1920 1920 self.footer = templater.stringify(
1921 1921 self.t(self._parts['footer'], **props))
1922 1922
1923 1923 def logtemplatespec(tmpl, mapfile):
1924 1924 if mapfile:
1925 1925 return formatter.templatespec('changeset', tmpl, mapfile)
1926 1926 else:
1927 1927 return formatter.templatespec('', tmpl, None)
1928 1928
1929 1929 def _lookuplogtemplate(ui, tmpl, style):
1930 1930 """Find the template matching the given template spec or style
1931 1931
1932 1932 See formatter.lookuptemplate() for details.
1933 1933 """
1934 1934
1935 1935 # ui settings
1936 1936 if not tmpl and not style: # templates are stronger than styles
1937 1937 tmpl = ui.config('ui', 'logtemplate')
1938 1938 if tmpl:
1939 1939 return logtemplatespec(templater.unquotestring(tmpl), None)
1940 1940 else:
1941 1941 style = util.expandpath(ui.config('ui', 'style'))
1942 1942
1943 1943 if not tmpl and style:
1944 1944 mapfile = style
1945 1945 if not os.path.split(mapfile)[0]:
1946 1946 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1947 1947 or templater.templatepath(mapfile))
1948 1948 if mapname:
1949 1949 mapfile = mapname
1950 1950 return logtemplatespec(None, mapfile)
1951 1951
1952 1952 if not tmpl:
1953 1953 return logtemplatespec(None, None)
1954 1954
1955 1955 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1956 1956
1957 1957 def makelogtemplater(ui, repo, tmpl, buffered=False):
1958 1958 """Create a changeset_templater from a literal template 'tmpl'
1959 1959 byte-string."""
1960 1960 spec = logtemplatespec(tmpl, None)
1961 1961 return changeset_templater(ui, repo, spec, buffered=buffered)
1962 1962
1963 1963 def show_changeset(ui, repo, opts, buffered=False):
1964 1964 """show one changeset using template or regular display.
1965 1965
1966 1966 Display format will be the first non-empty hit of:
1967 1967 1. option 'template'
1968 1968 2. option 'style'
1969 1969 3. [ui] setting 'logtemplate'
1970 1970 4. [ui] setting 'style'
1971 1971 If all of these values are either unset or the empty string,
1972 1972 regular display via changeset_printer() is done.
1973 1973 """
1974 1974 # options
1975 1975 match = None
1976 1976 if opts.get('patch') or opts.get('stat'):
1977 1977 match = scmutil.matchall(repo)
1978 1978
1979 1979 if opts.get('template') == 'json':
1980 1980 return jsonchangeset(ui, repo, match, opts, buffered)
1981 1981
1982 1982 spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
1983 1983
1984 1984 if not spec.ref and not spec.tmpl and not spec.mapfile:
1985 1985 return changeset_printer(ui, repo, match, opts, buffered)
1986 1986
1987 1987 return changeset_templater(ui, repo, spec, match, opts, buffered)
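
# Editor's note (illustrative, not from the original change): the precedence
# documented above works out roughly as:
#   show_changeset(ui, repo, {'template': '{rev}\n'})  # 1. literal template
#   show_changeset(ui, repo, {'style': 'compact'})     # 2. style map file
#   show_changeset(ui, repo, {})                       # 3./4. [ui] logtemplate
#                                                      #       or [ui] style,
#                                                      #       else changeset_printer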
1988 1988
1989 1989 def showmarker(fm, marker, index=None):
1990 1990 """utility function to display obsolescence marker in a readable way
1991 1991
1992 1992 To be used by debug function."""
1993 1993 if index is not None:
1994 1994 fm.write('index', '%i ', index)
1995 1995 fm.write('prednode', '%s ', hex(marker.prednode()))
1996 1996 succs = marker.succnodes()
1997 1997 fm.condwrite(succs, 'succnodes', '%s ',
1998 1998 fm.formatlist(map(hex, succs), name='node'))
1999 1999 fm.write('flag', '%X ', marker.flags())
2000 2000 parents = marker.parentnodes()
2001 2001 if parents is not None:
2002 2002 fm.write('parentnodes', '{%s} ',
2003 2003 fm.formatlist(map(hex, parents), name='node', sep=', '))
2004 2004 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
2005 2005 meta = marker.metadata().copy()
2006 2006 meta.pop('date', None)
2007 2007 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
2008 2008 fm.plain('\n')
2009 2009
2010 2010 def finddate(ui, repo, date):
2011 2011 """Find the tipmost changeset that matches the given date spec"""
2012 2012
2013 2013 df = util.matchdate(date)
2014 2014 m = scmutil.matchall(repo)
2015 2015 results = {}
2016 2016
2017 2017 def prep(ctx, fns):
2018 2018 d = ctx.date()
2019 2019 if df(d[0]):
2020 2020 results[ctx.rev()] = d
2021 2021
2022 2022 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
2023 2023 rev = ctx.rev()
2024 2024 if rev in results:
2025 2025 ui.status(_("found revision %s from %s\n") %
2026 2026 (rev, util.datestr(results[rev])))
2027 2027 return '%d' % rev
2028 2028
2029 2029 raise error.Abort(_("revision matching date not found"))
2030 2030
2031 2031 def increasingwindows(windowsize=8, sizelimit=512):
2032 2032 while True:
2033 2033 yield windowsize
2034 2034 if windowsize < sizelimit:
2035 2035 windowsize *= 2
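
# Editor's sketch (illustration only, not part of the original patch): the
# window sizes double until they reach sizelimit and then stay there.
def _sketch_windowsizes():
    sizes = list(itertools.islice(increasingwindows(), 8))
    assert sizes == [8, 16, 32, 64, 128, 256, 512, 512]
    return sizes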
2036 2036
2037 2037 class FileWalkError(Exception):
2038 2038 pass
2039 2039
2040 2040 def walkfilerevs(repo, match, follow, revs, fncache):
2041 2041 '''Walks the file history for the matched files.
2042 2042
2043 2043 Returns the changeset revs that are involved in the file history.
2044 2044
2045 2045 Throws FileWalkError if the file history can't be walked using
2046 2046 filelogs alone.
2047 2047 '''
2048 2048 wanted = set()
2049 2049 copies = []
2050 2050 minrev, maxrev = min(revs), max(revs)
2051 2051 def filerevgen(filelog, last):
2052 2052 """
2053 2053 Only files, no patterns. Check the history of each file.
2054 2054
2055 2055 Examines filelog entries within the minrev..maxrev linkrev range.
2056 2056 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
2057 2057 tuples in backwards order.
2058 2058 """
2059 2059 cl_count = len(repo)
2060 2060 revs = []
2061 2061 for j in xrange(0, last + 1):
2062 2062 linkrev = filelog.linkrev(j)
2063 2063 if linkrev < minrev:
2064 2064 continue
2065 2065 # only yield revs for which we have the changelog; missing ones can
2066 2066 # happen while doing "hg log" during a pull or commit
2067 2067 if linkrev >= cl_count:
2068 2068 break
2069 2069
2070 2070 parentlinkrevs = []
2071 2071 for p in filelog.parentrevs(j):
2072 2072 if p != nullrev:
2073 2073 parentlinkrevs.append(filelog.linkrev(p))
2074 2074 n = filelog.node(j)
2075 2075 revs.append((linkrev, parentlinkrevs,
2076 2076 follow and filelog.renamed(n)))
2077 2077
2078 2078 return reversed(revs)
2079 2079 def iterfiles():
2080 2080 pctx = repo['.']
2081 2081 for filename in match.files():
2082 2082 if follow:
2083 2083 if filename not in pctx:
2084 2084 raise error.Abort(_('cannot follow file not in parent '
2085 2085 'revision: "%s"') % filename)
2086 2086 yield filename, pctx[filename].filenode()
2087 2087 else:
2088 2088 yield filename, None
2089 2089 for filename_node in copies:
2090 2090 yield filename_node
2091 2091
2092 2092 for file_, node in iterfiles():
2093 2093 filelog = repo.file(file_)
2094 2094 if not len(filelog):
2095 2095 if node is None:
2096 2096 # A zero count may be a directory or deleted file, so
2097 2097 # try to find matching entries on the slow path.
2098 2098 if follow:
2099 2099 raise error.Abort(
2100 2100 _('cannot follow nonexistent file: "%s"') % file_)
2101 2101 raise FileWalkError("Cannot walk via filelog")
2102 2102 else:
2103 2103 continue
2104 2104
2105 2105 if node is None:
2106 2106 last = len(filelog) - 1
2107 2107 else:
2108 2108 last = filelog.rev(node)
2109 2109
2110 2110 # keep track of all ancestors of the file
2111 2111 ancestors = {filelog.linkrev(last)}
2112 2112
2113 2113 # iterate from latest to oldest revision
2114 2114 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
2115 2115 if not follow:
2116 2116 if rev > maxrev:
2117 2117 continue
2118 2118 else:
2119 2119 # Note that last might not be the first interesting
2120 2120 # rev to us:
2121 2121 # if the file has been changed after maxrev, we'll
2122 2122 # have linkrev(last) > maxrev, and we still need
2123 2123 # to explore the file graph
2124 2124 if rev not in ancestors:
2125 2125 continue
2126 2126 # XXX insert 1327 fix here
2127 2127 if flparentlinkrevs:
2128 2128 ancestors.update(flparentlinkrevs)
2129 2129
2130 2130 fncache.setdefault(rev, []).append(file_)
2131 2131 wanted.add(rev)
2132 2132 if copied:
2133 2133 copies.append(copied)
2134 2134
2135 2135 return wanted
2136 2136
2137 2137 class _followfilter(object):
2138 2138 def __init__(self, repo, onlyfirst=False):
2139 2139 self.repo = repo
2140 2140 self.startrev = nullrev
2141 2141 self.roots = set()
2142 2142 self.onlyfirst = onlyfirst
2143 2143
2144 2144 def match(self, rev):
2145 2145 def realparents(rev):
2146 2146 if self.onlyfirst:
2147 2147 return self.repo.changelog.parentrevs(rev)[0:1]
2148 2148 else:
2149 2149 return filter(lambda x: x != nullrev,
2150 2150 self.repo.changelog.parentrevs(rev))
2151 2151
2152 2152 if self.startrev == nullrev:
2153 2153 self.startrev = rev
2154 2154 return True
2155 2155
2156 2156 if rev > self.startrev:
2157 2157 # forward: all descendants
2158 2158 if not self.roots:
2159 2159 self.roots.add(self.startrev)
2160 2160 for parent in realparents(rev):
2161 2161 if parent in self.roots:
2162 2162 self.roots.add(rev)
2163 2163 return True
2164 2164 else:
2165 2165 # backwards: all parents
2166 2166 if not self.roots:
2167 2167 self.roots.update(realparents(self.startrev))
2168 2168 if rev in self.roots:
2169 2169 self.roots.remove(rev)
2170 2170 self.roots.update(realparents(rev))
2171 2171 return True
2172 2172
2173 2173 return False
2174 2174
2175 2175 def walkchangerevs(repo, match, opts, prepare):
2176 2176 '''Iterate over files and the revs in which they changed.
2177 2177
2178 2178 Callers most commonly need to iterate backwards over the history
2179 2179 in which they are interested. Doing so has awful (quadratic-looking)
2180 2180 performance, so we use iterators in a "windowed" way.
2181 2181
2182 2182 We walk a window of revisions in the desired order. Within the
2183 2183 window, we first walk forwards to gather data, then in the desired
2184 2184 order (usually backwards) to display it.
2185 2185
2186 2186 This function returns an iterator yielding contexts. Before
2187 2187 yielding each context, the iterator will first call the prepare
2188 2188 function on each context in the window in forward order.'''
2189 2189
2190 2190 follow = opts.get('follow') or opts.get('follow_first')
2191 2191 revs = _logrevs(repo, opts)
2192 2192 if not revs:
2193 2193 return []
2194 2194 wanted = set()
2195 2195 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2196 2196 opts.get('removed'))
2197 2197 fncache = {}
2198 2198 change = repo.changectx
2199 2199
2200 2200 # First step is to fill wanted, the set of revisions that we want to yield.
2201 2201 # When it does not induce extra cost, we also fill fncache for revisions in
2202 2202 # wanted: a cache of filenames that were changed (ctx.files()) and that
2203 2203 # match the file filtering conditions.
2204 2204
2205 2205 if match.always():
2206 2206 # No files, no patterns. Display all revs.
2207 2207 wanted = revs
2208 2208 elif not slowpath:
2209 2209 # We only have to read through the filelog to find wanted revisions
2210 2210
2211 2211 try:
2212 2212 wanted = walkfilerevs(repo, match, follow, revs, fncache)
2213 2213 except FileWalkError:
2214 2214 slowpath = True
2215 2215
2216 2216 # We decided to fall back to the slowpath because at least one
2217 2217 # of the paths was not a file. Check to see if at least one of them
2218 2218 # existed in history, otherwise simply return
2219 2219 for path in match.files():
2220 2220 if path == '.' or path in repo.store:
2221 2221 break
2222 2222 else:
2223 2223 return []
2224 2224
2225 2225 if slowpath:
2226 2226 # We have to read the changelog to match filenames against
2227 2227 # changed files
2228 2228
2229 2229 if follow:
2230 2230 raise error.Abort(_('can only follow copies/renames for explicit '
2231 2231 'filenames'))
2232 2232
2233 2233 # The slow path checks files modified in every changeset.
2234 2234 # This is really slow on large repos, so compute the set lazily.
2235 2235 class lazywantedset(object):
2236 2236 def __init__(self):
2237 2237 self.set = set()
2238 2238 self.revs = set(revs)
2239 2239
2240 2240 # No need to worry about locality here because it will be accessed
2241 2241 # in the same order as the increasing window below.
2242 2242 def __contains__(self, value):
2243 2243 if value in self.set:
2244 2244 return True
2245 2245 elif not value in self.revs:
2246 2246 return False
2247 2247 else:
2248 2248 self.revs.discard(value)
2249 2249 ctx = change(value)
2250 2250 matches = filter(match, ctx.files())
2251 2251 if matches:
2252 2252 fncache[value] = matches
2253 2253 self.set.add(value)
2254 2254 return True
2255 2255 return False
2256 2256
2257 2257 def discard(self, value):
2258 2258 self.revs.discard(value)
2259 2259 self.set.discard(value)
2260 2260
2261 2261 wanted = lazywantedset()
2262 2262
2263 2263 # it might be worthwhile to do this in the iterator if the rev range
2264 2264 # is descending and the prune args are all within that range
2265 2265 for rev in opts.get('prune', ()):
2266 2266 rev = repo[rev].rev()
2267 2267 ff = _followfilter(repo)
2268 2268 stop = min(revs[0], revs[-1])
2269 2269 for x in xrange(rev, stop - 1, -1):
2270 2270 if ff.match(x):
2271 2271 wanted = wanted - [x]
2272 2272
2273 2273 # Now that wanted is correctly initialized, we can iterate over the
2274 2274 # revision range, yielding only revisions in wanted.
2275 2275 def iterate():
2276 2276 if follow and match.always():
2277 2277 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
2278 2278 def want(rev):
2279 2279 return ff.match(rev) and rev in wanted
2280 2280 else:
2281 2281 def want(rev):
2282 2282 return rev in wanted
2283 2283
2284 2284 it = iter(revs)
2285 2285 stopiteration = False
2286 2286 for windowsize in increasingwindows():
2287 2287 nrevs = []
2288 2288 for i in xrange(windowsize):
2289 2289 rev = next(it, None)
2290 2290 if rev is None:
2291 2291 stopiteration = True
2292 2292 break
2293 2293 elif want(rev):
2294 2294 nrevs.append(rev)
2295 2295 for rev in sorted(nrevs):
2296 2296 fns = fncache.get(rev)
2297 2297 ctx = change(rev)
2298 2298 if not fns:
2299 2299 def fns_generator():
2300 2300 for f in ctx.files():
2301 2301 if match(f):
2302 2302 yield f
2303 2303 fns = fns_generator()
2304 2304 prepare(ctx, fns)
2305 2305 for rev in nrevs:
2306 2306 yield change(rev)
2307 2307
2308 2308 if stopiteration:
2309 2309 break
2310 2310
2311 2311 return iterate()
2312 2312
2313 2313 def _makefollowlogfilematcher(repo, files, followfirst):
2314 2314 # When displaying a revision with --patch --follow FILE, we have
2315 2315 # to know which file of the revision must be diffed. With
2316 2316 # --follow, we want the names of the ancestors of FILE in the
2317 2317 # revision, stored in "fcache". "fcache" is populated by
2318 2318 # reproducing the graph traversal already done by --follow revset
2319 2319 # and relating revs to file names (which is not "correct" but
2320 2320 # good enough).
2321 2321 fcache = {}
2322 2322 fcacheready = [False]
2323 2323 pctx = repo['.']
2324 2324
2325 2325 def populate():
2326 2326 for fn in files:
2327 2327 fctx = pctx[fn]
2328 2328 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
2329 2329 for c in fctx.ancestors(followfirst=followfirst):
2330 2330 fcache.setdefault(c.rev(), set()).add(c.path())
2331 2331
2332 2332 def filematcher(rev):
2333 2333 if not fcacheready[0]:
2334 2334 # Lazy initialization
2335 2335 fcacheready[0] = True
2336 2336 populate()
2337 2337 return scmutil.matchfiles(repo, fcache.get(rev, []))
2338 2338
2339 2339 return filematcher
2340 2340
2341 2341 def _makenofollowlogfilematcher(repo, pats, opts):
2342 2342 '''hook for extensions to override the filematcher for non-follow cases'''
2343 2343 return None
2344 2344
2345 2345 def _makelogrevset(repo, pats, opts, revs):
2346 2346 """Return (expr, filematcher) where expr is a revset string built
2347 2347 from log options and file patterns, or None. If --stat or --patch
2348 2348 are not passed, filematcher is None. Otherwise it is a callable
2349 2349 taking a revision number and returning a match object filtering
2350 2350 the files to be detailed when displaying the revision.
2351 2351 """
2352 2352 opt2revset = {
2353 2353 'no_merges': ('not merge()', None),
2354 2354 'only_merges': ('merge()', None),
2355 2355 '_ancestors': ('ancestors(%(val)s)', None),
2356 2356 '_fancestors': ('_firstancestors(%(val)s)', None),
2357 2357 '_descendants': ('descendants(%(val)s)', None),
2358 2358 '_fdescendants': ('_firstdescendants(%(val)s)', None),
2359 2359 '_matchfiles': ('_matchfiles(%(val)s)', None),
2360 2360 'date': ('date(%(val)r)', None),
2361 2361 'branch': ('branch(%(val)r)', ' or '),
2362 2362 '_patslog': ('filelog(%(val)r)', ' or '),
2363 2363 '_patsfollow': ('follow(%(val)r)', ' or '),
2364 2364 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
2365 2365 'keyword': ('keyword(%(val)r)', ' or '),
2366 2366 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
2367 2367 'user': ('user(%(val)r)', ' or '),
2368 2368 }
2369 2369
2370 2370 opts = dict(opts)
2371 2371 # follow or not follow?
2372 2372 follow = opts.get('follow') or opts.get('follow_first')
2373 2373 if opts.get('follow_first'):
2374 2374 followfirst = 1
2375 2375 else:
2376 2376 followfirst = 0
2377 2377 # --follow with FILE behavior depends on revs...
2378 2378 it = iter(revs)
2379 2379 startrev = next(it)
2380 2380 followdescendants = startrev < next(it, startrev)
2381 2381
2382 2382 # branch and only_branch are really aliases and must be handled at
2383 2383 # the same time
2384 2384 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2385 2385 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2386 2386 # pats/include/exclude are passed to match.match() directly in
2387 2387 # _matchfiles() revset but walkchangerevs() builds its matcher with
2388 2388 # scmutil.match(). The difference is input pats are globbed on
2389 2389 # platforms without shell expansion (windows).
2390 2390 wctx = repo[None]
2391 2391 match, pats = scmutil.matchandpats(wctx, pats, opts)
2392 2392 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2393 2393 opts.get('removed'))
2394 2394 if not slowpath:
2395 2395 for f in match.files():
2396 2396 if follow and f not in wctx:
2397 2397 # If the file exists, it may be a directory, so let it
2398 2398 # take the slow path.
2399 2399 if os.path.exists(repo.wjoin(f)):
2400 2400 slowpath = True
2401 2401 continue
2402 2402 else:
2403 2403 raise error.Abort(_('cannot follow file not in parent '
2404 2404 'revision: "%s"') % f)
2405 2405 filelog = repo.file(f)
2406 2406 if not filelog:
2407 2407 # A zero count may be a directory or deleted file, so
2408 2408 # try to find matching entries on the slow path.
2409 2409 if follow:
2410 2410 raise error.Abort(
2411 2411 _('cannot follow nonexistent file: "%s"') % f)
2412 2412 slowpath = True
2413 2413
2414 2414 # We decided to fall back to the slowpath because at least one
2415 2415 # of the paths was not a file. Check to see if at least one of them
2416 2416 # existed in history - in that case, we'll continue down the
2417 2417 # slowpath; otherwise, we can turn off the slowpath
2418 2418 if slowpath:
2419 2419 for path in match.files():
2420 2420 if path == '.' or path in repo.store:
2421 2421 break
2422 2422 else:
2423 2423 slowpath = False
2424 2424
2425 2425 fpats = ('_patsfollow', '_patsfollowfirst')
2426 2426 fnopats = (('_ancestors', '_fancestors'),
2427 2427 ('_descendants', '_fdescendants'))
2428 2428 if slowpath:
2429 2429 # See walkchangerevs() slow path.
2430 2430 #
2431 2431 # pats/include/exclude cannot be represented as separate
2432 2432 # revset expressions as their filtering logic applies at file
2433 2433 # level. For instance "-I a -X a" matches a revision touching
2434 2434 # "a" and "b" while "file(a) and not file(b)" does
2435 2435 # not. Besides, filesets are evaluated against the working
2436 2436 # directory.
2437 2437 matchargs = ['r:', 'd:relpath']
2438 2438 for p in pats:
2439 2439 matchargs.append('p:' + p)
2440 2440 for p in opts.get('include', []):
2441 2441 matchargs.append('i:' + p)
2442 2442 for p in opts.get('exclude', []):
2443 2443 matchargs.append('x:' + p)
2444 2444 matchargs = ','.join(('%r' % p) for p in matchargs)
2445 2445 opts['_matchfiles'] = matchargs
2446 2446 if follow:
2447 2447 opts[fnopats[0][followfirst]] = '.'
2448 2448 else:
2449 2449 if follow:
2450 2450 if pats:
2451 2451 # follow() revset interprets its file argument as a
2452 2452 # manifest entry, so use match.files(), not pats.
2453 2453 opts[fpats[followfirst]] = list(match.files())
2454 2454 else:
2455 2455 op = fnopats[followdescendants][followfirst]
2456 2456 opts[op] = 'rev(%d)' % startrev
2457 2457 else:
2458 2458 opts['_patslog'] = list(pats)
2459 2459
2460 2460 filematcher = None
2461 2461 if opts.get('patch') or opts.get('stat'):
2462 2462 # When following files, track renames via a special matcher.
2463 2463 # If we're forced to take the slowpath it means we're following
2464 2464 # at least one pattern/directory, so don't bother with rename tracking.
2465 2465 if follow and not match.always() and not slowpath:
2466 2466 # _makefollowlogfilematcher expects its files argument to be
2467 2467 # relative to the repo root, so use match.files(), not pats.
2468 2468 filematcher = _makefollowlogfilematcher(repo, match.files(),
2469 2469 followfirst)
2470 2470 else:
2471 2471 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2472 2472 if filematcher is None:
2473 2473 filematcher = lambda rev: match
2474 2474
2475 2475 expr = []
2476 2476 for op, val in sorted(opts.iteritems()):
2477 2477 if not val:
2478 2478 continue
2479 2479 if op not in opt2revset:
2480 2480 continue
2481 2481 revop, andor = opt2revset[op]
2482 2482 if '%(val)' not in revop:
2483 2483 expr.append(revop)
2484 2484 else:
2485 2485 if not isinstance(val, list):
2486 2486 e = revop % {'val': val}
2487 2487 else:
2488 2488 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2489 2489 expr.append(e)
2490 2490
2491 2491 if expr:
2492 2492 expr = '(' + ' and '.join(expr) + ')'
2493 2493 else:
2494 2494 expr = None
2495 2495 return expr, filematcher
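
# Editor's note (illustrative assumption, not part of the change): through
# opt2revset above, {'no_merges': True} contributes "not merge()" and
# {'user': ['alice', 'bob']} contributes "(user('alice') or user('bob'))";
# the contributed pieces are then joined with ' and ' (and parenthesized)
# to form expr.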
2496 2496
2497 2497 def _logrevs(repo, opts):
2498 2498 # Default --rev value depends on --follow but --follow behavior
2499 2499 # depends on revisions resolved from --rev...
2500 2500 follow = opts.get('follow') or opts.get('follow_first')
2501 2501 if opts.get('rev'):
2502 2502 revs = scmutil.revrange(repo, opts['rev'])
2503 2503 elif follow and repo.dirstate.p1() == nullid:
2504 2504 revs = smartset.baseset()
2505 2505 elif follow:
2506 2506 revs = repo.revs('reverse(:.)')
2507 2507 else:
2508 2508 revs = smartset.spanset(repo)
2509 2509 revs.reverse()
2510 2510 return revs
2511 2511
2512 2512 def getgraphlogrevs(repo, pats, opts):
2513 2513 """Return (revs, expr, filematcher) where revs is an iterable of
2514 2514 revision numbers, expr is a revset string built from log options
2515 2515 and file patterns or None, and used to filter 'revs'. If --stat or
2516 2516 --patch are not passed, filematcher is None. Otherwise it is a
2517 2517 callable taking a revision number and returning a match object
2518 2518 filtering the files to be detailed when displaying the revision.
2519 2519 """
2520 2520 limit = loglimit(opts)
2521 2521 revs = _logrevs(repo, opts)
2522 2522 if not revs:
2523 2523 return smartset.baseset(), None, None
2524 2524 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2525 2525 if opts.get('rev'):
2526 2526 # User-specified revs might be unsorted, but don't sort before
2527 2527 # _makelogrevset because it might depend on the order of revs
2528 2528 if not (revs.isdescending() or revs.istopo()):
2529 2529 revs.sort(reverse=True)
2530 2530 if expr:
2531 2531 matcher = revset.match(repo.ui, expr)
2532 2532 revs = matcher(repo, revs)
2533 2533 if limit is not None:
2534 2534 limitedrevs = []
2535 2535 for idx, rev in enumerate(revs):
2536 2536 if idx >= limit:
2537 2537 break
2538 2538 limitedrevs.append(rev)
2539 2539 revs = smartset.baseset(limitedrevs)
2540 2540
2541 2541 return revs, expr, filematcher
2542 2542
2543 2543 def getlogrevs(repo, pats, opts):
2544 2544 """Return (revs, expr, filematcher) where revs is an iterable of
2545 2545 revision numbers, expr is a revset string built from log options
2546 2546 and file patterns or None, and used to filter 'revs'. If --stat or
2547 2547 --patch are not passed, filematcher is None. Otherwise it is a
2548 2548 callable taking a revision number and returning a match object
2549 2549 filtering the files to be detailed when displaying the revision.
2550 2550 """
2551 2551 limit = loglimit(opts)
2552 2552 revs = _logrevs(repo, opts)
2553 2553 if not revs:
2554 2554 return smartset.baseset([]), None, None
2555 2555 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2556 2556 if expr:
2557 2557 matcher = revset.match(repo.ui, expr)
2558 2558 revs = matcher(repo, revs)
2559 2559 if limit is not None:
2560 2560 limitedrevs = []
2561 2561 for idx, r in enumerate(revs):
2562 2562 if limit <= idx:
2563 2563 break
2564 2564 limitedrevs.append(r)
2565 2565 revs = smartset.baseset(limitedrevs)
2566 2566
2567 2567 return revs, expr, filematcher
2568 2568
2569 2569 def _parselinerangelogopt(repo, opts):
2570 2570 """Parse --line-range log option and return a list of tuples (filename,
2571 2571 (fromline, toline)).
2572 2572 """
2573 2573 linerangebyfname = []
2574 2574 for pat in opts.get('line_range', []):
2575 2575 try:
2576 2576 pat, linerange = pat.rsplit(',', 1)
2577 2577 except ValueError:
2578 2578 raise error.Abort(_('malformatted line-range pattern %s') % pat)
2579 2579 try:
2580 2580 fromline, toline = map(int, linerange.split(':'))
2581 2581 except ValueError:
2582 2582 raise error.Abort(_("invalid line range for %s") % pat)
2583 2583 msg = _("line range pattern '%s' must match exactly one file") % pat
2584 2584 fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
2585 2585 linerangebyfname.append(
2586 2586 (fname, util.processlinerange(fromline, toline)))
2587 2587 return linerangebyfname
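
# Editor's sketch (hypothetical values, for illustration): a --line-range
# argument such as "mercurial/cmdutil.py,10:20" is split on its last comma
# into a file pattern and a fromline:toline pair, resolved to exactly one
# tracked file, and stored as (fname, util.processlinerange(10, 20)).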
2588 2588
2589 2589 def getloglinerangerevs(repo, userrevs, opts):
2590 2590 """Return (revs, filematcher, hunksfilter).
2591 2591
2592 2592 "revs" are revisions obtained by processing "line-range" log options and
2593 2593 walking block ancestors of each specified file/line-range.
2594 2594
2595 2595 "filematcher(rev) -> match" is a factory function returning a match object
2596 2596 for a given revision for file patterns specified in the --line-range option.
2597 2597 If neither --stat nor --patch options are passed, "filematcher" is None.
2598 2598
2599 2599 "hunksfilter(rev) -> filterfn(fctx, hunks)" is a factory function
2600 2600 returning a hunks filtering function.
2601 2601 If neither --stat nor --patch options are passed, "filterhunks" is None.
2602 2602 """
2603 2603 wctx = repo[None]
2604 2604
2605 2605 # Two-levels map of "rev -> file ctx -> [line range]".
2606 2606 linerangesbyrev = {}
2607 2607 for fname, (fromline, toline) in _parselinerangelogopt(repo, opts):
2608 2608 if fname not in wctx:
2609 2609 raise error.Abort(_('cannot follow file not in parent '
2610 2610 'revision: "%s"') % fname)
2611 2611 fctx = wctx.filectx(fname)
2612 2612 for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
2613 2613 rev = fctx.introrev()
2614 2614 if rev not in userrevs:
2615 2615 continue
2616 2616 linerangesbyrev.setdefault(
2617 2617 rev, {}).setdefault(
2618 2618 fctx.path(), []).append(linerange)
2619 2619
2620 2620 filematcher = None
2621 2621 hunksfilter = None
2622 2622 if opts.get('patch') or opts.get('stat'):
2623 2623
2624 2624 def nofilterhunksfn(fctx, hunks):
2625 2625 return hunks
2626 2626
2627 2627 def hunksfilter(rev):
2628 2628 fctxlineranges = linerangesbyrev.get(rev)
2629 2629 if fctxlineranges is None:
2630 2630 return nofilterhunksfn
2631 2631
2632 2632 def filterfn(fctx, hunks):
2633 2633 lineranges = fctxlineranges.get(fctx.path())
2634 2634 if lineranges is not None:
2635 2635 for hr, lines in hunks:
2636 2636 if hr is None: # binary
2637 2637 yield hr, lines
2638 2638 continue
2639 2639 if any(mdiff.hunkinrange(hr[2:], lr)
2640 2640 for lr in lineranges):
2641 2641 yield hr, lines
2642 2642 else:
2643 2643 for hunk in hunks:
2644 2644 yield hunk
2645 2645
2646 2646 return filterfn
2647 2647
2648 2648 def filematcher(rev):
2649 2649 files = list(linerangesbyrev.get(rev, []))
2650 2650 return scmutil.matchfiles(repo, files)
2651 2651
2652 2652 revs = sorted(linerangesbyrev, reverse=True)
2653 2653
2654 2654 return revs, filematcher, hunksfilter
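
# Editor's sketch (hypothetical caller, for illustration): when --patch or
# --stat is given, the returned factories plug into the display loop roughly
# as follows; otherwise filematcher and hunksfilter are None and unused.
#   revs, filematcher, hunksfilter = getloglinerangerevs(repo, userrevs, opts)
#   for rev in revs:
#       displayer.show(repo[rev], matchfn=filematcher(rev),
#                      hunksfilterfn=hunksfilter(rev))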
2655 2655
2656 2656 def _graphnodeformatter(ui, displayer):
2657 2657 spec = ui.config('ui', 'graphnodetemplate')
2658 2658 if not spec:
2659 2659 return templatekw.showgraphnode # fast path for "{graphnode}"
2660 2660
2661 2661 spec = templater.unquotestring(spec)
2662 2662 templ = formatter.maketemplater(ui, spec)
2663 2663 cache = {}
2664 2664 if isinstance(displayer, changeset_templater):
2665 2665 cache = displayer.cache # reuse cache of slow templates
2666 2666 props = templatekw.keywords.copy()
2667 2667 props['templ'] = templ
2668 2668 props['cache'] = cache
2669 2669 def formatnode(repo, ctx):
2670 2670 props['ctx'] = ctx
2671 2671 props['repo'] = repo
2672 2672 props['ui'] = repo.ui
2673 2673 props['revcache'] = {}
2674 2674 return templ.render(props)
2675 2675 return formatnode
2676 2676
2677 2677 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2678 2678 filematcher=None, props=None):
2679 2679 props = props or {}
2680 2680 formatnode = _graphnodeformatter(ui, displayer)
2681 2681 state = graphmod.asciistate()
2682 2682 styles = state['styles']
2683 2683
2684 2684 # only set graph styling if HGPLAIN is not set.
2685 2685 if ui.plain('graph'):
2686 2686 # set all edge styles to |, the default pre-3.8 behaviour
2687 2687 styles.update(dict.fromkeys(styles, '|'))
2688 2688 else:
2689 2689 edgetypes = {
2690 2690 'parent': graphmod.PARENT,
2691 2691 'grandparent': graphmod.GRANDPARENT,
2692 2692 'missing': graphmod.MISSINGPARENT
2693 2693 }
2694 2694 for name, key in edgetypes.items():
2695 2695 # experimental config: experimental.graphstyle.*
2696 2696 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2697 2697 styles[key])
2698 2698 if not styles[key]:
2699 2699 styles[key] = None
2700 2700
2701 2701 # experimental config: experimental.graphshorten
2702 2702 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2703 2703
2704 2704 for rev, type, ctx, parents in dag:
2705 2705 char = formatnode(repo, ctx)
2706 2706 copies = None
2707 2707 if getrenamed and ctx.rev():
2708 2708 copies = []
2709 2709 for fn in ctx.files():
2710 2710 rename = getrenamed(fn, ctx.rev())
2711 2711 if rename:
2712 2712 copies.append((fn, rename[0]))
2713 2713 revmatchfn = None
2714 2714 if filematcher is not None:
2715 2715 revmatchfn = filematcher(ctx.rev())
2716 2716 edges = edgefn(type, char, state, rev, parents)
2717 2717 firstedge = next(edges)
2718 2718 width = firstedge[2]
2719 2719 displayer.show(ctx, copies=copies, matchfn=revmatchfn,
2720 2720 _graphwidth=width, **props)
2721 2721 lines = displayer.hunk.pop(rev).split('\n')
2722 2722 if not lines[-1]:
2723 2723 del lines[-1]
2724 2724 displayer.flush(ctx)
2725 2725 for type, char, width, coldata in itertools.chain([firstedge], edges):
2726 2726 graphmod.ascii(ui, state, type, char, lines, coldata)
2727 2727 lines = []
2728 2728 displayer.close()
2729 2729
2730 2730 def graphlog(ui, repo, pats, opts):
2731 2731 # Parameters are identical to those of the log command
2732 2732 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2733 2733 revdag = graphmod.dagwalker(repo, revs)
2734 2734
2735 2735 getrenamed = None
2736 2736 if opts.get('copies'):
2737 2737 endrev = None
2738 2738 if opts.get('rev'):
2739 2739 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2740 2740 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2741 2741
2742 2742 ui.pager('log')
2743 2743 displayer = show_changeset(ui, repo, opts, buffered=True)
2744 2744 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2745 2745 filematcher)
2746 2746
2747 2747 def checkunsupportedgraphflags(pats, opts):
2748 2748 for op in ["newest_first"]:
2749 2749 if op in opts and opts[op]:
2750 2750 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2751 2751 % op.replace("_", "-"))
2752 2752
2753 2753 def graphrevs(repo, nodes, opts):
2754 2754 limit = loglimit(opts)
2755 2755 nodes.reverse()
2756 2756 if limit is not None:
2757 2757 nodes = nodes[:limit]
2758 2758 return graphmod.nodes(repo, nodes)
2759 2759
2760 2760 def add(ui, repo, match, prefix, explicitonly, **opts):
2761 2761 join = lambda f: os.path.join(prefix, f)
2762 2762 bad = []
2763 2763
2764 2764 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2765 2765 names = []
2766 2766 wctx = repo[None]
2767 2767 cca = None
2768 2768 abort, warn = scmutil.checkportabilityalert(ui)
2769 2769 if abort or warn:
2770 2770 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2771 2771
2772 2772 badmatch = matchmod.badmatch(match, badfn)
2773 2773 dirstate = repo.dirstate
2774 2774 # We don't want to just call wctx.walk here, since it would return a lot of
2775 2775 # clean files, which we aren't interested in, and doing so takes time.
2776 2776 for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
2777 2777 unknown=True, ignored=False, full=False)):
2778 2778 exact = match.exact(f)
2779 2779 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2780 2780 if cca:
2781 2781 cca(f)
2782 2782 names.append(f)
2783 2783 if ui.verbose or not exact:
2784 2784 ui.status(_('adding %s\n') % match.rel(f))
2785 2785
2786 2786 for subpath in sorted(wctx.substate):
2787 2787 sub = wctx.sub(subpath)
2788 2788 try:
2789 2789 submatch = matchmod.subdirmatcher(subpath, match)
2790 2790 if opts.get(r'subrepos'):
2791 2791 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2792 2792 else:
2793 2793 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2794 2794 except error.LookupError:
2795 2795 ui.status(_("skipping missing subrepository: %s\n")
2796 2796 % join(subpath))
2797 2797
2798 2798 if not opts.get(r'dry_run'):
2799 2799 rejected = wctx.add(names, prefix)
2800 2800 bad.extend(f for f in rejected if f in match.files())
2801 2801 return bad
2802 2802
2803 2803 def addwebdirpath(repo, serverpath, webconf):
2804 2804 webconf[serverpath] = repo.root
2805 2805 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2806 2806
2807 2807 for r in repo.revs('filelog("path:.hgsub")'):
2808 2808 ctx = repo[r]
2809 2809 for subpath in ctx.substate:
2810 2810 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2811 2811
2812 2812 def forget(ui, repo, match, prefix, explicitonly):
2813 2813 join = lambda f: os.path.join(prefix, f)
2814 2814 bad = []
2815 2815 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2816 2816 wctx = repo[None]
2817 2817 forgot = []
2818 2818
2819 2819 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2820 2820 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2821 2821 if explicitonly:
2822 2822 forget = [f for f in forget if match.exact(f)]
2823 2823
2824 2824 for subpath in sorted(wctx.substate):
2825 2825 sub = wctx.sub(subpath)
2826 2826 try:
2827 2827 submatch = matchmod.subdirmatcher(subpath, match)
2828 2828 subbad, subforgot = sub.forget(submatch, prefix)
2829 2829 bad.extend([subpath + '/' + f for f in subbad])
2830 2830 forgot.extend([subpath + '/' + f for f in subforgot])
2831 2831 except error.LookupError:
2832 2832 ui.status(_("skipping missing subrepository: %s\n")
2833 2833 % join(subpath))
2834 2834
2835 2835 if not explicitonly:
2836 2836 for f in match.files():
2837 2837 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2838 2838 if f not in forgot:
2839 2839 if repo.wvfs.exists(f):
2840 2840 # Don't complain if the exact case match wasn't given.
2841 2841 # But don't do this until after checking 'forgot', so
2842 2842 # that subrepo files aren't normalized, and this op is
2843 2843 # purely from data cached by the status walk above.
2844 2844 if repo.dirstate.normalize(f) in repo.dirstate:
2845 2845 continue
2846 2846 ui.warn(_('not removing %s: '
2847 2847 'file is already untracked\n')
2848 2848 % match.rel(f))
2849 2849 bad.append(f)
2850 2850
2851 2851 for f in forget:
2852 2852 if ui.verbose or not match.exact(f):
2853 2853 ui.status(_('removing %s\n') % match.rel(f))
2854 2854
2855 2855 rejected = wctx.forget(forget, prefix)
2856 2856 bad.extend(f for f in rejected if f in match.files())
2857 2857 forgot.extend(f for f in forget if f not in rejected)
2858 2858 return bad, forgot
2859 2859
2860 2860 def files(ui, ctx, m, fm, fmt, subrepos):
2861 2861 rev = ctx.rev()
2862 2862 ret = 1
2863 2863 ds = ctx.repo().dirstate
2864 2864
2865 2865 for f in ctx.matches(m):
2866 2866 if rev is None and ds[f] == 'r':
2867 2867 continue
2868 2868 fm.startitem()
2869 2869 if ui.verbose:
2870 2870 fc = ctx[f]
2871 2871 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2872 2872 fm.data(abspath=f)
2873 2873 fm.write('path', fmt, m.rel(f))
2874 2874 ret = 0
2875 2875
2876 2876 for subpath in sorted(ctx.substate):
2877 2877 submatch = matchmod.subdirmatcher(subpath, m)
2878 2878 if (subrepos or m.exact(subpath) or any(submatch.files())):
2879 2879 sub = ctx.sub(subpath)
2880 2880 try:
2881 2881 recurse = m.exact(subpath) or subrepos
2882 2882 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2883 2883 ret = 0
2884 2884 except error.LookupError:
2885 2885 ui.status(_("skipping missing subrepository: %s\n")
2886 2886 % m.abs(subpath))
2887 2887
2888 2888 return ret
2889 2889
2890 2890 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2891 2891 join = lambda f: os.path.join(prefix, f)
2892 2892 ret = 0
2893 2893 s = repo.status(match=m, clean=True)
2894 2894 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2895 2895
2896 2896 wctx = repo[None]
2897 2897
2898 2898 if warnings is None:
2899 2899 warnings = []
2900 2900 warn = True
2901 2901 else:
2902 2902 warn = False
2903 2903
2904 2904 subs = sorted(wctx.substate)
2905 2905 total = len(subs)
2906 2906 count = 0
2907 2907 for subpath in subs:
2908 2908 count += 1
2909 2909 submatch = matchmod.subdirmatcher(subpath, m)
2910 2910 if subrepos or m.exact(subpath) or any(submatch.files()):
2911 2911 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2912 2912 sub = wctx.sub(subpath)
2913 2913 try:
2914 2914 if sub.removefiles(submatch, prefix, after, force, subrepos,
2915 2915 warnings):
2916 2916 ret = 1
2917 2917 except error.LookupError:
2918 2918 warnings.append(_("skipping missing subrepository: %s\n")
2919 2919 % join(subpath))
2920 2920 ui.progress(_('searching'), None)
2921 2921
2922 2922 # warn about failure to delete explicit files/dirs
2923 2923 deleteddirs = util.dirs(deleted)
2924 2924 files = m.files()
2925 2925 total = len(files)
2926 2926 count = 0
2927 2927 for f in files:
2928 2928 def insubrepo():
2929 2929 for subpath in wctx.substate:
2930 2930 if f.startswith(subpath + '/'):
2931 2931 return True
2932 2932 return False
2933 2933
2934 2934 count += 1
2935 2935 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2936 2936 isdir = f in deleteddirs or wctx.hasdir(f)
2937 2937 if (f in repo.dirstate or isdir or f == '.'
2938 2938 or insubrepo() or f in subs):
2939 2939 continue
2940 2940
2941 2941 if repo.wvfs.exists(f):
2942 2942 if repo.wvfs.isdir(f):
2943 2943 warnings.append(_('not removing %s: no tracked files\n')
2944 2944 % m.rel(f))
2945 2945 else:
2946 2946 warnings.append(_('not removing %s: file is untracked\n')
2947 2947 % m.rel(f))
2948 2948 # missing files will generate a warning elsewhere
2949 2949 ret = 1
2950 2950 ui.progress(_('deleting'), None)
2951 2951
2952 2952 if force:
2953 2953 list = modified + deleted + clean + added
2954 2954 elif after:
2955 2955 list = deleted
2956 2956 remaining = modified + added + clean
2957 2957 total = len(remaining)
2958 2958 count = 0
2959 2959 for f in remaining:
2960 2960 count += 1
2961 2961 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2962 2962 if ui.verbose or (f in files):
2963 2963 warnings.append(_('not removing %s: file still exists\n')
2964 2964 % m.rel(f))
2965 2965 ret = 1
2966 2966 ui.progress(_('skipping'), None)
2967 2967 else:
2968 2968 list = deleted + clean
2969 2969 total = len(modified) + len(added)
2970 2970 count = 0
2971 2971 for f in modified:
2972 2972 count += 1
2973 2973 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2974 2974 warnings.append(_('not removing %s: file is modified (use -f'
2975 2975 ' to force removal)\n') % m.rel(f))
2976 2976 ret = 1
2977 2977 for f in added:
2978 2978 count += 1
2979 2979 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2980 2980 warnings.append(_("not removing %s: file has been marked for add"
2981 2981 " (use 'hg forget' to undo add)\n") % m.rel(f))
2982 2982 ret = 1
2983 2983 ui.progress(_('skipping'), None)
2984 2984
2985 2985 list = sorted(list)
2986 2986 total = len(list)
2987 2987 count = 0
2988 2988 for f in list:
2989 2989 count += 1
2990 2990 if ui.verbose or not m.exact(f):
2991 2991 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2992 2992 ui.status(_('removing %s\n') % m.rel(f))
2993 2993 ui.progress(_('deleting'), None)
2994 2994
2995 2995 with repo.wlock():
2996 2996 if not after:
2997 2997 for f in list:
2998 2998 if f in added:
2999 2999 continue # we never unlink added files on remove
3000 3000 repo.wvfs.unlinkpath(f, ignoremissing=True)
3001 3001 repo[None].forget(list)
3002 3002
3003 3003 if warn:
3004 3004 for warning in warnings:
3005 3005 ui.warn(warning)
3006 3006
3007 3007 return ret
3008 3008
3009 3009 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
3010 3010 err = 1
3011 3011
3012 3012 def write(path):
3013 3013 filename = None
3014 3014 if fntemplate:
3015 3015 filename = makefilename(repo, fntemplate, ctx.node(),
3016 3016 pathname=os.path.join(prefix, path))
3017 3017 # attempt to create the directory if it does not already exist
3018 3018 try:
3019 3019 os.makedirs(os.path.dirname(filename))
3020 3020 except OSError:
3021 3021 pass
3022 3022 with formatter.maybereopen(basefm, filename, opts) as fm:
3023 3023 data = ctx[path].data()
3024 3024 if opts.get('decode'):
3025 3025 data = repo.wwritedata(path, data)
3026 3026 fm.startitem()
3027 3027 fm.write('data', '%s', data)
3028 3028 fm.data(abspath=path, path=matcher.rel(path))
3029 3029
3030 3030 # Automation often uses hg cat on single files, so special case it
3031 3031 # for performance to avoid the cost of parsing the manifest.
3032 3032 if len(matcher.files()) == 1 and not matcher.anypats():
3033 3033 file = matcher.files()[0]
3034 3034 mfl = repo.manifestlog
3035 3035 mfnode = ctx.manifestnode()
3036 3036 try:
3037 3037 if mfnode and mfl[mfnode].find(file)[0]:
3038 3038 write(file)
3039 3039 return 0
3040 3040 except KeyError:
3041 3041 pass
3042 3042
3043 3043 for abs in ctx.walk(matcher):
3044 3044 write(abs)
3045 3045 err = 0
3046 3046
3047 3047 for subpath in sorted(ctx.substate):
3048 3048 sub = ctx.sub(subpath)
3049 3049 try:
3050 3050 submatch = matchmod.subdirmatcher(subpath, matcher)
3051 3051
3052 3052 if not sub.cat(submatch, basefm, fntemplate,
3053 3053 os.path.join(prefix, sub._path), **opts):
3054 3054 err = 0
3055 3055 except error.RepoLookupError:
3056 3056 ui.status(_("skipping missing subrepository: %s\n")
3057 3057 % os.path.join(prefix, subpath))
3058 3058
3059 3059 return err
3060 3060
3061 3061 def commit(ui, repo, commitfunc, pats, opts):
3062 3062 '''commit the specified files or all outstanding changes'''
3063 3063 date = opts.get('date')
3064 3064 if date:
3065 3065 opts['date'] = util.parsedate(date)
3066 3066 message = logmessage(ui, opts)
3067 3067 matcher = scmutil.match(repo[None], pats, opts)
3068 3068
3069 3069 dsguard = None
3070 3070 # extract addremove carefully -- this function can be called from a command
3071 3071 # that doesn't support addremove
3072 3072 if opts.get('addremove'):
3073 3073 dsguard = dirstateguard.dirstateguard(repo, 'commit')
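# dsguard exists only when addremove may touch the dirstate; otherwise a
# no-op context manager keeps the with-block below uniform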
3074 3074 with dsguard or util.nullcontextmanager():
3075 3075 if dsguard:
3076 3076 if scmutil.addremove(repo, matcher, "", opts) != 0:
3077 3077 raise error.Abort(
3078 3078 _("failed to mark all new/missing files as added/removed"))
3079 3079
3080 3080 return commitfunc(ui, repo, message, matcher, opts)
3081 3081
3082 3082 def samefile(f, ctx1, ctx2):
3083 3083 if f in ctx1.manifest():
3084 3084 a = ctx1.filectx(f)
3085 3085 if f in ctx2.manifest():
3086 3086 b = ctx2.filectx(f)
3087 3087 return (not a.cmp(b)
3088 3088 and a.flags() == b.flags())
3089 3089 else:
3090 3090 return False
3091 3091 else:
3092 3092 return f not in ctx2.manifest()
3093 3093
3094 3094 def amend(ui, repo, old, extra, pats, opts):
3095 3095 # avoid cycle context -> subrepo -> cmdutil
3096 3096 from . import context
3097 3097
3098 3098 # amend will reuse the existing user if not specified, but the obsolete
3099 3099 # marker creation requires that the current user's name is specified.
3100 3100 if obsolete.isenabled(repo, obsolete.createmarkersopt):
3101 3101 ui.username() # raise exception if username not set
3102 3102
3103 3103 ui.note(_('amending changeset %s\n') % old)
3104 3104 base = old.p1()
3105 3105
3106 3106 with repo.wlock(), repo.lock(), repo.transaction('amend'):
3107 3107 # Participating changesets:
3108 3108 #
3109 3109 # wctx o - workingctx that contains changes from working copy
3110 3110 # | to go into amending commit
3111 3111 # |
3112 3112 # old o - changeset to amend
3113 3113 # |
3114 3114 # base o - first parent of the changeset to amend
3115 3115 wctx = repo[None]
3116 3116
3117 3117 # Copy to avoid mutating input
3118 3118 extra = extra.copy()
3119 3119 # Update extra dict from amended commit (e.g. to preserve graft
3120 3120 # source)
3121 3121 extra.update(old.extra())
3122 3122
3123 3123 # Also update it from the wctx
3124 3124 extra.update(wctx.extra())
3125 3125
3126 3126 user = opts.get('user') or old.user()
3127 3127 date = opts.get('date') or old.date()
3128 3128
3129 3129 # Parse the date to allow comparison between date and old.date()
3130 3130 date = util.parsedate(date)
3131 3131
3132 3132 if len(old.parents()) > 1:
3133 3133 # ctx.files() isn't reliable for merges, so fall back to the
3134 3134 # slower repo.status() method
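# (the first three status fields are modified, added and removed)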
3135 3135 files = set([fn for st in repo.status(base, old)[:3]
3136 3136 for fn in st])
3137 3137 else:
3138 3138 files = set(old.files())
3139 3139
3140 3140 # add/remove the files to the working copy if the "addremove" option
3141 3141 # was specified.
3142 3142 matcher = scmutil.match(wctx, pats, opts)
3143 3143 if (opts.get('addremove')
3144 3144 and scmutil.addremove(repo, matcher, "", opts)):
3145 3145 raise error.Abort(
3146 3146 _("failed to mark all new/missing files as added/removed"))
3147 3147
3148 3148 # Check subrepos. This depends on in-place wctx._status update in
3149 3149 # subrepo.precommit(). To minimize the risk of this hack, we do
3150 3150 # nothing if .hgsub does not exist.
3151 3151 if '.hgsub' in wctx or '.hgsub' in old:
3152 3152 from . import subrepo # avoid cycle: cmdutil -> subrepo -> cmdutil
3153 3153 subs, commitsubs, newsubstate = subrepo.precommit(
3154 3154 ui, wctx, wctx._status, matcher)
3155 3155 # amend should abort if commitsubrepos is enabled
3156 3156 assert not commitsubs
3157 3157 if subs:
3158 3158 subrepo.writestate(repo, newsubstate)
3159 3159
3160 3160 filestoamend = set(f for f in wctx.files() if matcher(f))
3161 3161
3162 3162 changes = (len(filestoamend) > 0)
3163 3163 if changes:
3164 3164 # Recompute copies (avoid recording a -> b -> a)
3165 3165 copied = copies.pathcopies(base, wctx, matcher)
3166 3166 if old.p2().node() != nullid:
3167 3167 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3168 3168
3169 3169 # Prune files which were reverted by the updates: if old
3170 3170 # introduced file X and the file was renamed in the working
3171 3171 # copy, then those two files are the same and
3172 3172 # we can discard X from our list of files. Likewise if X
3173 3173 # was removed, it's no longer relevant. If X is missing (aka
3174 3174 # deleted), old X must be preserved.
3175 3175 files.update(filestoamend)
3176 3176 files = [f for f in files if (not samefile(f, wctx, base)
3177 3177 or f in wctx.deleted())]
3178 3178
3179 3179 def filectxfn(repo, ctx_, path):
3180 3180 try:
3181 3181 # If the file being considered is not amongst the files
3182 3182 # to be amended, we should return the file context from the
3183 3183 # old changeset. This avoids issues when only some files in
3184 3184 # the working copy are being amended but there are also
3185 3185 # changes to other files from the old changeset.
3186 3186 if path not in filestoamend:
3187 3187 return old.filectx(path)
3188 3188
3189 3189 # Return None for removed files.
3190 3190 if path in wctx.removed():
3191 3191 return None
3192 3192
3193 3193 fctx = wctx[path]
3194 3194 flags = fctx.flags()
3195 3195 mctx = context.memfilectx(repo,
3196 3196 fctx.path(), fctx.data(),
3197 3197 islink='l' in flags,
3198 3198 isexec='x' in flags,
3199 3199 copied=copied.get(path))
3200 3200 return mctx
3201 3201 except KeyError:
3202 3202 return None
3203 3203 else:
3204 3204 ui.note(_('copying changeset %s to %s\n') % (old, base))
3205 3205
3206 3206 # Use version of files as in the old cset
3207 3207 def filectxfn(repo, ctx_, path):
3208 3208 try:
3209 3209 return old.filectx(path)
3210 3210 except KeyError:
3211 3211 return None
3212 3212
3213 3213 # See if we got a message from -m or -l, if not, open the editor with
3214 3214 # the message of the changeset to amend.
3215 3215 message = logmessage(ui, opts)
3216 3216
3217 3217 editform = mergeeditform(old, 'commit.amend')
3218 3218 editor = getcommiteditor(editform=editform,
3219 3219 **pycompat.strkwargs(opts))
3220 3220
3221 3221 if not message:
3222 3222 editor = getcommiteditor(edit=True, editform=editform)
3223 3223 message = old.description()
3224 3224
3225 3225 pureextra = extra.copy()
3226 3226 extra['amend_source'] = old.hex()
3227 3227
3228 3228 new = context.memctx(repo,
3229 3229 parents=[base.node(), old.p2().node()],
3230 3230 text=message,
3231 3231 files=files,
3232 3232 filectxfn=filectxfn,
3233 3233 user=user,
3234 3234 date=date,
3235 3235 extra=extra,
3236 3236 editor=editor)
3237 3237
3238 3238 newdesc = changelog.stripdesc(new.description())
3239 3239 if ((not changes)
3240 3240 and newdesc == old.description()
3241 3241 and user == old.user()
3242 3242 and date == old.date()
3243 3243 and pureextra == old.extra()):
3244 3244 # nothing changed. continuing here would create a new node
3245 3245 # anyway because of the amend_source noise.
3246 3246 #
3247 3247 # This is not what we expect from amend.
3248 3248 return old.node()
3249 3249
3250 3250 if opts.get('secret'):
3251 3251 commitphase = 'secret'
3252 3252 else:
3253 3253 commitphase = old.phase()
3254 3254 overrides = {('phases', 'new-commit'): commitphase}
3255 3255 with ui.configoverride(overrides, 'amend'):
3256 3256 newid = repo.commitctx(new)
3257 3257
3258 3258 # Reroute the working copy parent to the new changeset
3259 3259 repo.setparents(newid, nullid)
3260 3260 mapping = {old.node(): (newid,)}
3261 3261 obsmetadata = None
3262 3262 if opts.get('note'):
3263 3263 obsmetadata = {'note': opts['note']}
3264 3264 scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)
3265 3265
3266 3266 # Fixing the dirstate because localrepo.commitctx does not update
3267 3267 # it. This is rather convenient because we did not need to update
3268 3268 # the dirstate for all the files in the new commit which commitctx
3269 3269 # could have done if it updated the dirstate. Now, we can
3270 3270 # selectively update the dirstate only for the amended files.
3271 3271 dirstate = repo.dirstate
3272 3272
3273 3273 # Update the state of the files which were added and modified in
3274 3274 # the amend to "normal" in the dirstate.
3275 3275 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3276 3276 for f in normalfiles:
3277 3277 dirstate.normal(f)
3278 3278
3279 3279 # Update the state of files which were removed in the amend
3280 3280 # to "removed" in the dirstate.
3281 3281 removedfiles = set(wctx.removed()) & filestoamend
3282 3282 for f in removedfiles:
3283 3283 dirstate.drop(f)
3284 3284
3285 3285 return newid
3286 3286
3287 3287 def commiteditor(repo, ctx, subs, editform=''):
3288 3288 if ctx.description():
3289 3289 return ctx.description()
3290 3290 return commitforceeditor(repo, ctx, subs, editform=editform,
3291 3291 unchangedmessagedetection=True)
3292 3292
3293 3293 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
3294 3294 editform='', unchangedmessagedetection=False):
3295 3295 if not extramsg:
3296 3296 extramsg = _("Leave message empty to abort commit.")
3297 3297
3298 3298 forms = [e for e in editform.split('.') if e]
3299 3299 forms.insert(0, 'changeset')
3300 3300 templatetext = None
3301 3301 while forms:
3302 3302 ref = '.'.join(forms)
3303 3303 if repo.ui.config('committemplate', ref):
3304 3304 templatetext = committext = buildcommittemplate(
3305 3305 repo, ctx, subs, extramsg, ref)
3306 3306 break
3307 3307 forms.pop()
3308 3308 else:
3309 3309 committext = buildcommittext(repo, ctx, subs, extramsg)
3310 3310
3311 3311 # run editor in the repository root
3312 3312 olddir = pycompat.getcwd()
3313 3313 os.chdir(repo.root)
3314 3314
3315 3315 # make in-memory changes visible to external process
3316 3316 tr = repo.currenttransaction()
3317 3317 repo.dirstate.write(tr)
3318 3318 pending = tr and tr.writepending() and repo.root
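# 'pending' is repo.root when the transaction wrote pending data for the
# editor to see, and False otherwise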
3319 3319
3320 3320 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
3321 3321 editform=editform, pending=pending,
3322 3322 repopath=repo.path, action='commit')
3323 3323 text = editortext
3324 3324
3325 3325 # strip away anything below this special string (used for editors that want
3326 3326 # to display the diff)
3327 3327 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3328 3328 if stripbelow:
3329 3329 text = text[:stripbelow.start()]
3330 3330
3331 3331 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
3332 3332 os.chdir(olddir)
3333 3333
3334 3334 if finishdesc:
3335 3335 text = finishdesc(text)
3336 3336 if not text.strip():
3337 3337 raise error.Abort(_("empty commit message"))
3338 3338 if unchangedmessagedetection and editortext == templatetext:
3339 3339 raise error.Abort(_("commit message unchanged"))
3340 3340
3341 3341 return text
3342 3342
3343 3343 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3344 3344 ui = repo.ui
3345 3345 spec = formatter.templatespec(ref, None, None)
3346 3346 t = changeset_templater(ui, repo, spec, None, {}, False)
3347 3347 t.t.cache.update((k, templater.unquotestring(v))
3348 3348 for k, v in repo.ui.configitems('committemplate'))
3349 3349
3350 3350 if not extramsg:
3351 3351 extramsg = '' # ensure that extramsg is string
3352 3352
3353 3353 ui.pushbuffer()
3354 3354 t.show(ctx, extramsg=extramsg)
3355 3355 return ui.popbuffer()
3356 3356
3357 3357 def hgprefix(msg):
3358 3358 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
3359 3359
3360 3360 def buildcommittext(repo, ctx, subs, extramsg):
3361 3361 edittext = []
3362 3362 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3363 3363 if ctx.description():
3364 3364 edittext.append(ctx.description())
3365 3365 edittext.append("")
3366 3366 edittext.append("") # Empty line between message and comments.
3367 3367 edittext.append(hgprefix(_("Enter commit message."
3368 3368 " Lines beginning with 'HG:' are removed.")))
3369 3369 edittext.append(hgprefix(extramsg))
3370 3370 edittext.append("HG: --")
3371 3371 edittext.append(hgprefix(_("user: %s") % ctx.user()))
3372 3372 if ctx.p2():
3373 3373 edittext.append(hgprefix(_("branch merge")))
3374 3374 if ctx.branch():
3375 3375 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
3376 3376 if bookmarks.isactivewdirparent(repo):
3377 3377 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
3378 3378 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
3379 3379 edittext.extend([hgprefix(_("added %s") % f) for f in added])
3380 3380 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
3381 3381 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
3382 3382 if not added and not modified and not removed:
3383 3383 edittext.append(hgprefix(_("no files changed")))
3384 3384 edittext.append("")
3385 3385
3386 3386 return "\n".join(edittext)
3387 3387
3388 3388 def commitstatus(repo, node, branch, bheads=None, opts=None):
3389 3389 if opts is None:
3390 3390 opts = {}
3391 3391 ctx = repo[node]
3392 3392 parents = ctx.parents()
3393 3393
3394 3394 if (not opts.get('amend') and bheads and node not in bheads and not
3395 3395 [x for x in parents if x.node() in bheads and x.branch() == branch]):
3396 3396 repo.ui.status(_('created new head\n'))
3397 3397 # The message is not printed for initial roots. For the other
3398 3398 # changesets, it is printed in the following situations:
3399 3399 #
3400 3400 # Par column: for the 2 parents with ...
3401 3401 # N: null or no parent
3402 3402 # B: parent is on another named branch
3403 3403 # C: parent is a regular non head changeset
3404 3404 # H: parent was a branch head of the current branch
3405 3405 # Msg column: whether we print "created new head" message
3406 3406 # In the following, it is assumed that there already exist some
3407 3407 # initial branch heads of the current branch, otherwise nothing is
3408 3408 # printed anyway.
3409 3409 #
3410 3410 # Par Msg Comment
3411 3411 # N N y additional topo root
3412 3412 #
3413 3413 # B N y additional branch root
3414 3414 # C N y additional topo head
3415 3415 # H N n usual case
3416 3416 #
3417 3417 # B B y weird additional branch root
3418 3418 # C B y branch merge
3419 3419 # H B n merge with named branch
3420 3420 #
3421 3421 # C C y additional head from merge
3422 3422 # C H n merge with a head
3423 3423 #
3424 3424 # H H n head merge: head count decreases
3425 3425
3426 3426 if not opts.get('close_branch'):
3427 3427 for r in parents:
3428 3428 if r.closesbranch() and r.branch() == branch:
3429 3429 repo.ui.status(_('reopening closed branch head %d\n') % r)
3430 3430
3431 3431 if repo.ui.debugflag:
3432 3432 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
3433 3433 elif repo.ui.verbose:
3434 3434 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3435 3435
3436 3436 def postcommitstatus(repo, pats, opts):
3437 3437 return repo.status(match=scmutil.match(repo[None], pats, opts))
3438 3438
3439 3439 def revert(ui, repo, ctx, parents, *pats, **opts):
3440 3440 opts = pycompat.byteskwargs(opts)
3441 3441 parent, p2 = parents
3442 3442 node = ctx.node()
3443 3443
3444 3444 mf = ctx.manifest()
3445 3445 if node == p2:
3446 3446 parent = p2
3447 3447
3448 3448 # need all matching names in dirstate and manifest of target rev,
3449 3449 # so have to walk both. do not print errors if files exist in one
3450 3450 # but not other. in both cases, filesets should be evaluated against
3451 3451 # workingctx to get consistent result (issue4497). this means 'set:**'
3452 3452 # cannot be used to select missing files from target rev.
3453 3453
3454 3454 # `names` is a mapping for all elements in working copy and target revision
3455 3455 # The mapping is in the form:
3456 3456 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3457 3457 names = {}
3458 3458
3459 3459 with repo.wlock():
3460 3460 ## filling of the `names` mapping
3461 3461 # walk dirstate to fill `names`
3462 3462
3463 3463 interactive = opts.get('interactive', False)
3464 3464 wctx = repo[None]
3465 3465 m = scmutil.match(wctx, pats, opts)
3466 3466
3467 3467 # we'll need this later
3468 3468 targetsubs = sorted(s for s in wctx.substate if m(s))
3469 3469
3470 3470 if not m.always():
3471 3471 matcher = matchmod.badmatch(m, lambda x, y: False)
3472 3472 for abs in wctx.walk(matcher):
3473 3473 names[abs] = m.rel(abs), m.exact(abs)
3474 3474
3475 3475 # walk target manifest to fill `names`
3476 3476
3477 3477 def badfn(path, msg):
3478 3478 if path in names:
3479 3479 return
3480 3480 if path in ctx.substate:
3481 3481 return
3482 3482 path_ = path + '/'
3483 3483 for f in names:
3484 3484 if f.startswith(path_):
3485 3485 return
3486 3486 ui.warn("%s: %s\n" % (m.rel(path), msg))
3487 3487
3488 3488 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3489 3489 if abs not in names:
3490 3490 names[abs] = m.rel(abs), m.exact(abs)
3491 3491
3492 3492 # Find status of all files in `names`.
3493 3493 m = scmutil.matchfiles(repo, names)
3494 3494
3495 3495 changes = repo.status(node1=node, match=m,
3496 3496 unknown=True, ignored=True, clean=True)
3497 3497 else:
3498 3498 changes = repo.status(node1=node, match=m)
3499 3499 for kind in changes:
3500 3500 for abs in kind:
3501 3501 names[abs] = m.rel(abs), m.exact(abs)
3502 3502
3503 3503 m = scmutil.matchfiles(repo, names)
3504 3504
3505 3505 modified = set(changes.modified)
3506 3506 added = set(changes.added)
3507 3507 removed = set(changes.removed)
3508 3508 _deleted = set(changes.deleted)
3509 3509 unknown = set(changes.unknown)
3510 3510 unknown.update(changes.ignored)
3511 3511 clean = set(changes.clean)
3512 3512 modadded = set()
3513 3513
3514 3514 # We need to account for the state of the file in the dirstate,
3515 3515 # even when we revert against something other than the parent. This will
3516 3516 # slightly alter the behavior of revert (doing a backup or not, delete
3517 3517 # or just forget, etc).
3518 3518 if parent == node:
3519 3519 dsmodified = modified
3520 3520 dsadded = added
3521 3521 dsremoved = removed
3522 3522 # store all local modifications, useful later for rename detection
3523 3523 localchanges = dsmodified | dsadded
3524 3524 modified, added, removed = set(), set(), set()
3525 3525 else:
3526 3526 changes = repo.status(node1=parent, match=m)
3527 3527 dsmodified = set(changes.modified)
3528 3528 dsadded = set(changes.added)
3529 3529 dsremoved = set(changes.removed)
3530 3530 # store all local modifications, useful later for rename detection
3531 3531 localchanges = dsmodified | dsadded
3532 3532
3533 3533 # only take into account removes between wc and target
3534 3534 clean |= dsremoved - removed
3535 3535 dsremoved &= removed
3536 3536 # distinguish between dirstate removes and others
3537 3537 removed -= dsremoved
3538 3538
3539 3539 modadded = added & dsmodified
3540 3540 added -= modadded
3541 3541
3542 3542 # tell newly modified files apart.
3543 3543 dsmodified &= modified
3544 3544 dsmodified |= modified & dsadded # dirstate added may need backup
3545 3545 modified -= dsmodified
3546 3546
3547 3547 # We need to wait for some post-processing to update this set
3548 3548 # before making the distinction. The dirstate will be used for
3549 3549 # that purpose.
3550 3550 dsadded = added
3551 3551
3552 3552 # in case of merge, files that are actually added can be reported as
3553 3553 # modified; we need to post-process the result
3554 3554 if p2 != nullid:
3555 3555 mergeadd = set(dsmodified)
3556 3556 for path in dsmodified:
3557 3557 if path in mf:
3558 3558 mergeadd.remove(path)
3559 3559 dsadded |= mergeadd
3560 3560 dsmodified -= mergeadd
3561 3561
3562 3562 # if f is a rename, update `names` to also revert the source
3563 3563 cwd = repo.getcwd()
3564 3564 for f in localchanges:
3565 3565 src = repo.dirstate.copied(f)
3566 3566 # XXX should we check for rename down to target node?
3567 3567 if src and src not in names and repo.dirstate[src] == 'r':
3568 3568 dsremoved.add(src)
3569 3569 names[src] = (repo.pathto(src, cwd), True)
3570 3570
3571 3571 # determine the exact nature of the deleted files
3572 3572 deladded = set(_deleted)
3573 3573 for path in _deleted:
3574 3574 if path in mf:
3575 3575 deladded.remove(path)
3576 3576 deleted = _deleted - deladded
3577 3577
3578 3578 # distinguish between files to forget and the others
3579 3579 added = set()
3580 3580 for abs in dsadded:
3581 3581 if repo.dirstate[abs] != 'a':
3582 3582 added.add(abs)
3583 3583 dsadded -= added
3584 3584
3585 3585 for abs in deladded:
3586 3586 if repo.dirstate[abs] == 'a':
3587 3587 dsadded.add(abs)
3588 3588 deladded -= dsadded
3589 3589
3590 3590 # For files marked as removed, we check if an unknown file is present at
3591 3591 # the same path. If such a file exists it may need to be backed up.
3592 3592 # Making the distinction at this stage helps keep the backup
3593 3593 # logic simpler.
3594 3594 removunk = set()
3595 3595 for abs in removed:
3596 3596 target = repo.wjoin(abs)
3597 3597 if os.path.lexists(target):
3598 3598 removunk.add(abs)
3599 3599 removed -= removunk
3600 3600
3601 3601 dsremovunk = set()
3602 3602 for abs in dsremoved:
3603 3603 target = repo.wjoin(abs)
3604 3604 if os.path.lexists(target):
3605 3605 dsremovunk.add(abs)
3606 3606 dsremoved -= dsremovunk
3607 3607
3608 3608 # actions to be actually performed by revert
3609 3609 # (<list of files>, <message>) tuple
3610 3610 actions = {'revert': ([], _('reverting %s\n')),
3611 3611 'add': ([], _('adding %s\n')),
3612 3612 'remove': ([], _('removing %s\n')),
3613 3613 'drop': ([], _('removing %s\n')),
3614 3614 'forget': ([], _('forgetting %s\n')),
3615 3615 'undelete': ([], _('undeleting %s\n')),
3616 3616 'noop': (None, _('no changes needed to %s\n')),
3617 3617 'unknown': (None, _('file not managed: %s\n')),
3618 3618 }
3619 3619
3620 3620 # "constant" that convey the backup strategy.
3621 3621 # All set to `discard` if `no-backup` is set do avoid checking
3622 3622 # no_backup lower in the code.
3623 3623 # These values are ordered for comparison purposes
3624 3624 backupinteractive = 3 # do backup if interactively modified
3625 3625 backup = 2 # unconditionally do backup
3626 3626 check = 1 # check if the existing file differs from target
3627 3627 discard = 0 # never do backup
3628 3628 if opts.get('no_backup'):
3629 3629 backupinteractive = backup = check = discard
3630 3630 if interactive:
3631 3631 dsmodifiedbackup = backupinteractive
3632 3632 else:
3633 3633 dsmodifiedbackup = backup
3634 3634 tobackup = set()
3635 3635
3636 3636 backupanddel = actions['remove']
3637 3637 if not opts.get('no_backup'):
3638 3638 backupanddel = actions['drop']
3639 3639
3640 3640 disptable = (
3641 3641 # dispatch table:
3642 3642 # file state
3643 3643 # action
3644 3644 # make backup
3645 3645
3646 3646 ## Sets that result in changes to files on disk
3647 3647 # Modified compared to target, no local change
3648 3648 (modified, actions['revert'], discard),
3649 3649 # Modified compared to target, but local file is deleted
3650 3650 (deleted, actions['revert'], discard),
3651 3651 # Modified compared to target, local change
3652 3652 (dsmodified, actions['revert'], dsmodifiedbackup),
3653 3653 # Added since target
3654 3654 (added, actions['remove'], discard),
3655 3655 # Added in working directory
3656 3656 (dsadded, actions['forget'], discard),
3657 3657 # Added since target, have local modification
3658 3658 (modadded, backupanddel, backup),
3659 3659 # Added since target but file is missing in working directory
3660 3660 (deladded, actions['drop'], discard),
3661 3661 # Removed since target, before working copy parent
3662 3662 (removed, actions['add'], discard),
3663 3663 # Same as `removed` but an unknown file exists at the same path
3664 3664 (removunk, actions['add'], check),
3665 3665 # Removed since target, marked as such in working copy parent
3666 3666 (dsremoved, actions['undelete'], discard),
3667 3667 # Same as `dsremoved` but an unknown file exists at the same path
3668 3668 (dsremovunk, actions['undelete'], check),
3669 3669 ## the following sets do not result in any file changes
3670 3670 # File with no modification
3671 3671 (clean, actions['noop'], discard),
3672 3672 # Existing file, not tracked anywhere
3673 3673 (unknown, actions['unknown'], discard),
3674 3674 )
3675 3675
3676 3676 for abs, (rel, exact) in sorted(names.items()):
3677 3677 # target file to be touched on disk (relative to cwd)
3678 3678 target = repo.wjoin(abs)
3679 3679 # search the entry in the dispatch table.
3680 3680 # if the file is in any of these sets, it was touched in the working
3681 3681 # directory parent and we are sure it needs to be reverted.
3682 3682 for table, (xlist, msg), dobackup in disptable:
3683 3683 if abs not in table:
3684 3684 continue
3685 3685 if xlist is not None:
3686 3686 xlist.append(abs)
3687 3687 if dobackup:
3688 3688 # If in interactive mode, don't automatically create
3689 3689 # .orig files (issue4793)
3690 3690 if dobackup == backupinteractive:
3691 3691 tobackup.add(abs)
3692 3692 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
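# the backup constants are ordered, so 'backup <= dobackup' means an
# unconditional backup; otherwise only back up if the content differs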
3693 3693 bakname = scmutil.origpath(ui, repo, rel)
3694 3694 ui.note(_('saving current version of %s as %s\n') %
3695 3695 (rel, bakname))
3696 3696 if not opts.get('dry_run'):
3697 3697 if interactive:
3698 3698 util.copyfile(target, bakname)
3699 3699 else:
3700 3700 util.rename(target, bakname)
3701 3701 if ui.verbose or not exact:
3702 3702 if not isinstance(msg, bytes):
3703 3703 msg = msg(abs)
3704 3704 ui.status(msg % rel)
3705 3705 elif exact:
3706 3706 ui.warn(msg % rel)
3707 3707 break
3708 3708
3709 3709 if not opts.get('dry_run'):
3710 3710 needdata = ('revert', 'add', 'undelete')
3711 3711 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3712 3712 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3713 3713
3714 3714 if targetsubs:
3715 3715 # Revert the subrepos on the revert list
3716 3716 for sub in targetsubs:
3717 3717 try:
3718 3718 wctx.sub(sub).revert(ctx.substate[sub], *pats,
3719 3719 **pycompat.strkwargs(opts))
3720 3720 except KeyError:
3721 3721 raise error.Abort("subrepository '%s' does not exist in %s!"
3722 3722 % (sub, short(ctx.node())))
3723 3723
3724 3724 def _revertprefetch(repo, ctx, *files):
3725 3725 """Let extension changing the storage layer prefetch content"""
3726 3726
3727 3727 def _performrevert(repo, parents, ctx, actions, interactive=False,
3728 3728 tobackup=None):
3729 3729 """function that actually perform all the actions computed for revert
3730 3730
3731 3731 This is an independent function to let extensions plug in and react to
3732 3732 the imminent revert.
3733 3733
3734 3734 Make sure you have the working directory locked when calling this function.
3735 3735 """
3736 3736 parent, p2 = parents
3737 3737 node = ctx.node()
3738 3738 excluded_files = []
3739 3739 matcher_opts = {"exclude": excluded_files}
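# files the user declines to revert interactively are appended to
# excluded_files so the matcher built later skips them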
3740 3740
3741 3741 def checkout(f):
3742 3742 fc = ctx[f]
3743 3743 repo.wwrite(f, fc.data(), fc.flags())
3744 3744
3745 3745 def doremove(f):
3746 3746 try:
3747 3747 repo.wvfs.unlinkpath(f)
3748 3748 except OSError:
3749 3749 pass
3750 3750 repo.dirstate.remove(f)
3751 3751
3752 3752 audit_path = pathutil.pathauditor(repo.root, cached=True)
3753 3753 for f in actions['forget'][0]:
3754 3754 if interactive:
3755 3755 choice = repo.ui.promptchoice(
3756 3756 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3757 3757 if choice == 0:
3758 3758 repo.dirstate.drop(f)
3759 3759 else:
3760 3760 excluded_files.append(repo.wjoin(f))
3761 3761 else:
3762 3762 repo.dirstate.drop(f)
3763 3763 for f in actions['remove'][0]:
3764 3764 audit_path(f)
3765 3765 if interactive:
3766 3766 choice = repo.ui.promptchoice(
3767 3767 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3768 3768 if choice == 0:
3769 3769 doremove(f)
3770 3770 else:
3771 3771 excluded_files.append(repo.wjoin(f))
3772 3772 else:
3773 3773 doremove(f)
3774 3774 for f in actions['drop'][0]:
3775 3775 audit_path(f)
3776 3776 repo.dirstate.remove(f)
3777 3777
3778 3778 normal = None
3779 3779 if node == parent:
3780 3780 # We're reverting to our parent. If possible, we'd like status
3781 3781 # to report the file as clean. We have to use normallookup for
3782 3782 # merges to avoid losing information about merged/dirty files.
3783 3783 if p2 != nullid:
3784 3784 normal = repo.dirstate.normallookup
3785 3785 else:
3786 3786 normal = repo.dirstate.normal
3787 3787
3788 3788 newlyaddedandmodifiedfiles = set()
3789 3789 if interactive:
3790 3790 # Prompt the user for changes to revert
3791 3791 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3792 3792 m = scmutil.match(ctx, torevert, matcher_opts)
3793 3793 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3794 3794 diffopts.nodates = True
3795 3795 diffopts.git = True
3796 3796 operation = 'discard'
3797 3797 reversehunks = True
3798 3798 if node != parent:
3799 3799 operation = 'apply'
3800 3800 reversehunks = False
3801 3801 if reversehunks:
3802 3802 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3803 3803 else:
3804 3804 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3805 3805 originalchunks = patch.parsepatch(diff)
3806 3806
3807 3807 try:
3808 3808
3809 3809 chunks, opts = recordfilter(repo.ui, originalchunks,
3810 3810 operation=operation)
3811 3811 if reversehunks:
3812 3812 chunks = patch.reversehunks(chunks)
3813 3813
3814 3814 except error.PatchError as err:
3815 3815 raise error.Abort(_('error parsing patch: %s') % err)
3816 3816
3817 3817 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3818 3818 if tobackup is None:
3819 3819 tobackup = set()
3820 3820 # Apply changes
3821 3821 fp = stringio()
3822 3822 for c in chunks:
3823 3823 # Create a backup file only if this hunk should be backed up
3824 3824 if ishunk(c) and c.header.filename() in tobackup:
3825 3825 abs = c.header.filename()
3826 3826 target = repo.wjoin(abs)
3827 3827 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3828 3828 util.copyfile(target, bakname)
3829 3829 tobackup.remove(abs)
3830 3830 c.write(fp)
3831 3831 dopatch = fp.tell()
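# fp.tell() is non-zero only if at least one chunk was written, i.e.
# there is something to apply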
3832 3832 fp.seek(0)
3833 3833 if dopatch:
3834 3834 try:
3835 3835 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3836 3836 except error.PatchError as err:
3837 3837 raise error.Abort(str(err))
3838 3838 del fp
3839 3839 else:
3840 3840 for f in actions['revert'][0]:
3841 3841 checkout(f)
3842 3842 if normal:
3843 3843 normal(f)
3844 3844
3845 3845 for f in actions['add'][0]:
3846 3846 # Don't checkout modified files, they are already created by the diff
3847 3847 if f not in newlyaddedandmodifiedfiles:
3848 3848 checkout(f)
3849 3849 repo.dirstate.add(f)
3850 3850
3851 3851 normal = repo.dirstate.normallookup
3852 3852 if node == parent and p2 == nullid:
3853 3853 normal = repo.dirstate.normal
3854 3854 for f in actions['undelete'][0]:
3855 3855 checkout(f)
3856 3856 normal(f)
3857 3857
3858 3858 copied = copies.pathcopies(repo[parent], ctx)
3859 3859
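# restore copy metadata so that reverted renames keep their source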
3860 3860 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3861 3861 if f in copied:
3862 3862 repo.dirstate.copy(copied[f], f)
3863 3863
3864 3864 class command(registrar.command):
3865 3865 """deprecated: used registrar.command instead"""
3866 3866 def _doregister(self, func, name, *args, **kwargs):
3867 3867 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3868 3868 return super(command, self)._doregister(func, name, *args, **kwargs)
3869 3869
3870 3870 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3871 3871 # commands.outgoing. "missing" is "missing" of the result of
3872 3872 # "findcommonoutgoing()"
3873 3873 outgoinghooks = util.hooks()
3874 3874
3875 3875 # a list of (ui, repo) functions called by commands.summary
3876 3876 summaryhooks = util.hooks()
3877 3877
3878 3878 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3879 3879 #
3880 3880 # functions should return tuple of booleans below, if 'changes' is None:
3881 3881 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3882 3882 #
3883 3883 # otherwise, 'changes' is a tuple of tuples below:
3884 3884 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3885 3885 # - (desturl, destbranch, destpeer, outgoing)
3886 3886 summaryremotehooks = util.hooks()
3887 3887
3888 3888 # A list of state files kept by multistep operations like graft.
3889 3889 # Since graft cannot be aborted, it is considered 'clearable' by update.
3890 3890 # note: bisect is intentionally excluded
3891 3891 # (state file, clearable, allowcommit, error, hint)
3892 3892 unfinishedstates = [
3893 3893 ('graftstate', True, False, _('graft in progress'),
3894 3894 _("use 'hg graft --continue' or 'hg update' to abort")),
3895 3895 ('updatestate', True, False, _('last update was interrupted'),
3896 3896 _("use 'hg update' to get a consistent checkout"))
3897 3897 ]
3898 3898
3899 3899 def checkunfinished(repo, commit=False):
3900 3900 '''Look for an unfinished multistep operation, like graft, and abort
3901 3901 if found. It's probably good to check this right before
3902 3902 bailifchanged().
3903 3903 '''
3904 3904 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3905 3905 if commit and allowcommit:
3906 3906 continue
3907 3907 if repo.vfs.exists(f):
3908 3908 raise error.Abort(msg, hint=hint)
3909 3909
3910 3910 def clearunfinished(repo):
3911 3911 '''Check for unfinished operations (as above), and clear the ones
3912 3912 that are clearable.
3913 3913 '''
3914 3914 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3915 3915 if not clearable and repo.vfs.exists(f):
3916 3916 raise error.Abort(msg, hint=hint)
3917 3917 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3918 3918 if clearable and repo.vfs.exists(f):
3919 3919 util.unlink(repo.vfs.join(f))
3920 3920
3921 3921 afterresolvedstates = [
3922 3922 ('graftstate',
3923 3923 _('hg graft --continue')),
3924 3924 ]
3925 3925
3926 3926 def howtocontinue(repo):
3927 3927 '''Check for an unfinished operation and return the command to finish
3928 3928 it.
3929 3929
3930 3930 afterresolvedstates tuples define a .hg/{file} and the corresponding
3931 3931 command needed to finish it.
3932 3932
3933 3933 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3934 3934 a boolean.
3935 3935 '''
3936 3936 contmsg = _("continue: %s")
3937 3937 for f, msg in afterresolvedstates:
3938 3938 if repo.vfs.exists(f):
3939 3939 return contmsg % msg, True
3940 3940 if repo[None].dirty(missing=True, merge=False, branch=False):
3941 3941 return contmsg % _("hg commit"), False
3942 3942 return None, None
3943 3943
3944 3944 def checkafterresolved(repo):
3945 3945 '''Inform the user about the next action after completing hg resolve
3946 3946
3947 3947 If there's a matching afterresolvedstates, howtocontinue will yield
3948 3948 repo.ui.warn as the reporter.
3949 3949
3950 3950 Otherwise, it will yield repo.ui.note.
3951 3951 '''
3952 3952 msg, warning = howtocontinue(repo)
3953 3953 if msg is not None:
3954 3954 if warning:
3955 3955 repo.ui.warn("%s\n" % msg)
3956 3956 else:
3957 3957 repo.ui.note("%s\n" % msg)
3958 3958
3959 3959 def wrongtooltocontinue(repo, task):
3960 3960 '''Raise an abort suggesting how to properly continue if there is an
3961 3961 active task.
3962 3962
3963 3963 Uses howtocontinue() to find the active task.
3964 3964
3965 3965 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3966 3966 a hint.
3967 3967 '''
3968 3968 after = howtocontinue(repo)
3969 3969 hint = None
3970 3970 if after[1]:
3971 3971 hint = after[0]
3972 3972 raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,518 +1,520 b''
1 1 # utility for color output for Mercurial commands
2 2 #
3 3 # Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com> and other
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import re
11 11
12 12 from .i18n import _
13 13
14 14 from . import (
15 15 encoding,
16 16 pycompat,
17 17 util
18 18 )
19 19
20 20 try:
21 21 import curses
22 22 # Mapping from effect name to terminfo attribute name (or raw code) or
23 23 # color number. This will also force-load the curses module.
24 24 _baseterminfoparams = {
25 25 'none': (True, 'sgr0', ''),
26 26 'standout': (True, 'smso', ''),
27 27 'underline': (True, 'smul', ''),
28 28 'reverse': (True, 'rev', ''),
29 29 'inverse': (True, 'rev', ''),
30 30 'blink': (True, 'blink', ''),
31 31 'dim': (True, 'dim', ''),
32 32 'bold': (True, 'bold', ''),
33 33 'invisible': (True, 'invis', ''),
34 34 'italic': (True, 'sitm', ''),
35 35 'black': (False, curses.COLOR_BLACK, ''),
36 36 'red': (False, curses.COLOR_RED, ''),
37 37 'green': (False, curses.COLOR_GREEN, ''),
38 38 'yellow': (False, curses.COLOR_YELLOW, ''),
39 39 'blue': (False, curses.COLOR_BLUE, ''),
40 40 'magenta': (False, curses.COLOR_MAGENTA, ''),
41 41 'cyan': (False, curses.COLOR_CYAN, ''),
42 42 'white': (False, curses.COLOR_WHITE, ''),
43 43 }
44 44 except ImportError:
45 45 curses = None
46 46 _baseterminfoparams = {}
47 47
48 48 # start and stop parameters for effects
49 49 _effects = {
50 50 'none': 0,
51 51 'black': 30,
52 52 'red': 31,
53 53 'green': 32,
54 54 'yellow': 33,
55 55 'blue': 34,
56 56 'magenta': 35,
57 57 'cyan': 36,
58 58 'white': 37,
59 59 'bold': 1,
60 60 'italic': 3,
61 61 'underline': 4,
62 62 'inverse': 7,
63 63 'dim': 2,
64 64 'black_background': 40,
65 65 'red_background': 41,
66 66 'green_background': 42,
67 67 'yellow_background': 43,
68 68 'blue_background': 44,
69 69 'purple_background': 45,
70 70 'cyan_background': 46,
71 71 'white_background': 47,
72 72 }
73 73
74 74 _defaultstyles = {
75 75 'grep.match': 'red bold',
76 76 'grep.linenumber': 'green',
77 77 'grep.rev': 'green',
78 78 'grep.change': 'green',
79 79 'grep.sep': 'cyan',
80 80 'grep.filename': 'magenta',
81 81 'grep.user': 'magenta',
82 82 'grep.date': 'magenta',
83 83 'bookmarks.active': 'green',
84 84 'branches.active': 'none',
85 85 'branches.closed': 'black bold',
86 86 'branches.current': 'green',
87 87 'branches.inactive': 'none',
88 88 'diff.changed': 'white',
89 89 'diff.deleted': 'red',
90 'diff.deleted.highlight': 'red bold underline',
90 91 'diff.diffline': 'bold',
91 92 'diff.extended': 'cyan bold',
92 93 'diff.file_a': 'red bold',
93 94 'diff.file_b': 'green bold',
94 95 'diff.hunk': 'magenta',
95 96 'diff.inserted': 'green',
97 'diff.inserted.highlight': 'green bold underline',
96 98 'diff.tab': '',
97 99 'diff.trailingwhitespace': 'bold red_background',
98 100 'changeset.public': '',
99 101 'changeset.draft': '',
100 102 'changeset.secret': '',
101 103 'diffstat.deleted': 'red',
102 104 'diffstat.inserted': 'green',
103 105 'histedit.remaining': 'red bold',
104 106 'ui.prompt': 'yellow',
105 107 'log.changeset': 'yellow',
106 108 'patchbomb.finalsummary': '',
107 109 'patchbomb.from': 'magenta',
108 110 'patchbomb.to': 'cyan',
109 111 'patchbomb.subject': 'green',
110 112 'patchbomb.diffstats': '',
111 113 'rebase.rebased': 'blue',
112 114 'rebase.remaining': 'red bold',
113 115 'resolve.resolved': 'green bold',
114 116 'resolve.unresolved': 'red bold',
115 117 'shelve.age': 'cyan',
116 118 'shelve.newest': 'green bold',
117 119 'shelve.name': 'blue bold',
118 120 'status.added': 'green bold',
119 121 'status.clean': 'none',
120 122 'status.copied': 'none',
121 123 'status.deleted': 'cyan bold underline',
122 124 'status.ignored': 'black bold',
123 125 'status.modified': 'blue bold',
124 126 'status.removed': 'red bold',
125 127 'status.unknown': 'magenta bold underline',
126 128 'tags.normal': 'green',
127 129 'tags.local': 'black bold',
128 130 }
129 131
130 132 def loadcolortable(ui, extname, colortable):
131 133 _defaultstyles.update(colortable)
132 134
133 135 def _terminfosetup(ui, mode, formatted):
134 136 '''Initialize terminfo data and the terminal if we're in terminfo mode.'''
135 137
136 138 # If we failed to load curses, we go ahead and return.
137 139 if curses is None:
138 140 return
139 141 # Otherwise, see what the config file says.
140 142 if mode not in ('auto', 'terminfo'):
141 143 return
142 144 ui._terminfoparams.update(_baseterminfoparams)
143 145
144 146 for key, val in ui.configitems('color'):
145 147 if key.startswith('color.'):
146 148 newval = (False, int(val), '')
147 149 ui._terminfoparams[key[6:]] = newval
148 150 elif key.startswith('terminfo.'):
149 151 newval = (True, '', val.replace('\\E', '\x1b'))
150 152 ui._terminfoparams[key[9:]] = newval
151 153 try:
152 154 curses.setupterm()
153 155 except curses.error as e:
154 156 ui._terminfoparams.clear()
155 157 return
156 158
157 159 for key, (b, e, c) in ui._terminfoparams.items():
158 160 if not b:
159 161 continue
160 162 if not c and not curses.tigetstr(e):
161 163 # Most terminals don't support dim, invis, etc, so don't be
162 164 # noisy and use ui.debug().
163 165 ui.debug("no terminfo entry for %s\n" % e)
164 166 del ui._terminfoparams[key]
165 167 if not curses.tigetstr('setaf') or not curses.tigetstr('setab'):
166 168 # Only warn about missing terminfo entries if we explicitly asked for
167 169 # terminfo mode and we're in a formatted terminal.
168 170 if mode == "terminfo" and formatted:
169 171 ui.warn(_("no terminfo entry for setab/setaf: reverting to "
170 172 "ECMA-48 color\n"))
171 173 ui._terminfoparams.clear()
172 174
173 175 def setup(ui):
174 176 """configure color on a ui
175 177
176 178 That function both sets the colormode for the ui object and reads
177 179 the configuration looking for custom colors and effect definitions."""
178 180 mode = _modesetup(ui)
179 181 ui._colormode = mode
180 182 if mode and mode != 'debug':
181 183 configstyles(ui)
182 184
183 185 def _modesetup(ui):
184 186 if ui.plain('color'):
185 187 return None
186 188 config = ui.config('ui', 'color')
187 189 if config == 'debug':
188 190 return 'debug'
189 191
190 192 auto = (config == 'auto')
191 193 always = False
192 194 if not auto and util.parsebool(config):
193 195 # We want the config to behave like a boolean; "on" is actually auto,
194 196 # but the "always" value is treated as a special case to reduce confusion.
195 197 if ui.configsource('ui', 'color') == '--color' or config == 'always':
196 198 always = True
197 199 else:
198 200 auto = True
199 201
200 202 if not always and not auto:
201 203 return None
202 204
203 205 formatted = (always or (encoding.environ.get('TERM') != 'dumb'
204 206 and ui.formatted()))
205 207
206 208 mode = ui.config('color', 'mode')
207 209
208 210 # If pager is active, color.pagermode overrides color.mode.
209 211 if getattr(ui, 'pageractive', False):
210 212 mode = ui.config('color', 'pagermode', mode)
211 213
212 214 realmode = mode
213 215 if pycompat.iswindows:
214 216 from . import win32
215 217
216 218 term = encoding.environ.get('TERM')
217 219 # TERM won't be defined in a vanilla cmd.exe environment.
218 220
219 221 # UNIX-like environments on Windows such as Cygwin and MSYS will
220 222 # set TERM. They appear to make a best effort attempt at setting it
221 223 # to something appropriate. However, not all environments with TERM
222 224 # defined support ANSI.
223 225 ansienviron = term and 'xterm' in term
224 226
225 227 if mode == 'auto':
226 228 # Since "ansi" could result in terminal gibberish, we error on the
227 229 # side of selecting "win32". However, if w32effects is not defined,
228 230 # we almost certainly don't support "win32", so don't even try.
229 231 # w32effects is not populated when stdout is redirected, so checking
230 232 # it first avoids win32 calls in a state known to error out.
231 233 if ansienviron or not w32effects or win32.enablevtmode():
232 234 realmode = 'ansi'
233 235 else:
234 236 realmode = 'win32'
235 237 # An empty w32effects is a clue that stdout is redirected, and thus
236 238 # cannot enable VT mode.
237 239 elif mode == 'ansi' and w32effects and not ansienviron:
238 240 win32.enablevtmode()
239 241 elif mode == 'auto':
240 242 realmode = 'ansi'
241 243
242 244 def modewarn():
243 245 # only warn if color.mode was explicitly set and we're in
244 246 # a formatted terminal
245 247 if mode == realmode and formatted:
246 248 ui.warn(_('warning: failed to set color mode to %s\n') % mode)
247 249
248 250 if realmode == 'win32':
249 251 ui._terminfoparams.clear()
250 252 if not w32effects:
251 253 modewarn()
252 254 return None
253 255 elif realmode == 'ansi':
254 256 ui._terminfoparams.clear()
255 257 elif realmode == 'terminfo':
256 258 _terminfosetup(ui, mode, formatted)
257 259 if not ui._terminfoparams:
258 260 ## FIXME Shouldn't we return None in this case too?
259 261 modewarn()
260 262 realmode = 'ansi'
261 263 else:
262 264 return None
263 265
264 266 if always or (auto and formatted):
265 267 return realmode
266 268 return None
267 269
268 270 def configstyles(ui):
269 271 ui._styles.update(_defaultstyles)
270 272 for status, cfgeffects in ui.configitems('color'):
271 273 if '.' not in status or status.startswith(('color.', 'terminfo.')):
272 274 continue
273 275 cfgeffects = ui.configlist('color', status)
274 276 if cfgeffects:
275 277 good = []
276 278 for e in cfgeffects:
277 279 if valideffect(ui, e):
278 280 good.append(e)
279 281 else:
280 282 ui.warn(_("ignoring unknown color/effect %r "
281 283 "(configured in color.%s)\n")
282 284 % (e, status))
283 285 ui._styles[status] = ' '.join(good)
284 286
285 287 def _activeeffects(ui):
286 288 '''Return the effects map for the color mode set on the ui.'''
287 289 if ui._colormode == 'win32':
288 290 return w32effects
289 291 elif ui._colormode is not None:
290 292 return _effects
291 293 return {}
292 294
293 295 def valideffect(ui, effect):
294 296 'Determine if the effect is valid or not.'
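# effect[:-11] strips a trailing '_background' (11 characters) so
# background variants are checked against the base terminfo entry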
295 297 return ((not ui._terminfoparams and effect in _activeeffects(ui))
296 298 or (effect in ui._terminfoparams
297 299 or effect[:-11] in ui._terminfoparams))
298 300
299 301 def _effect_str(ui, effect):
300 302 '''Helper function for render_effects().'''
301 303
302 304 bg = False
303 305 if effect.endswith('_background'):
304 306 bg = True
305 307 effect = effect[:-11]
306 308 try:
307 309 attr, val, termcode = ui._terminfoparams[effect]
308 310 except KeyError:
309 311 return ''
310 312 if attr:
311 313 if termcode:
312 314 return termcode
313 315 else:
314 316 return curses.tigetstr(val)
315 317 elif bg:
316 318 return curses.tparm(curses.tigetstr('setab'), val)
317 319 else:
318 320 return curses.tparm(curses.tigetstr('setaf'), val)
319 321
320 322 def _mergeeffects(text, start, stop):
321 323 """Insert start sequence at every occurrence of stop sequence
322 324
323 325 >>> s = _mergeeffects(b'cyan', b'[C]', b'|')
324 326 >>> s = _mergeeffects(s + b'yellow', b'[Y]', b'|')
325 327 >>> s = _mergeeffects(b'ma' + s + b'genta', b'[M]', b'|')
326 328 >>> s = _mergeeffects(b'red' + s, b'[R]', b'|')
327 329 >>> s
328 330 '[R]red[M]ma[Y][C]cyan|[R][M][Y]yellow|[R][M]genta|'
329 331 """
330 332 parts = []
331 333 for t in text.split(stop):
332 334 if not t:
333 335 continue
334 336 parts.extend([start, t, stop])
335 337 return ''.join(parts)
336 338
337 339 def _render_effects(ui, text, effects):
338 340 'Wrap text in commands to turn on each effect.'
339 341 if not text:
340 342 return text
341 343 if ui._terminfoparams:
342 344 start = ''.join(_effect_str(ui, effect)
343 345 for effect in ['none'] + effects.split())
344 346 stop = _effect_str(ui, 'none')
345 347 else:
346 348 activeeffects = _activeeffects(ui)
347 349 start = [pycompat.bytestr(activeeffects[e])
348 350 for e in ['none'] + effects.split()]
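# build an ECMA-48 SGR escape sequence: ESC [ <codes joined by ';'> m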
349 351 start = '\033[' + ';'.join(start) + 'm'
350 352 stop = '\033[' + pycompat.bytestr(activeeffects['none']) + 'm'
351 353 return _mergeeffects(text, start, stop)
352 354
353 355 _ansieffectre = re.compile(br'\x1b\[[0-9;]*m')
354 356
355 357 def stripeffects(text):
356 358 """Strip ANSI control codes which could be inserted by colorlabel()"""
357 359 return _ansieffectre.sub('', text)
358 360
359 361 def colorlabel(ui, msg, label):
360 362 """add color control code according to the mode"""
361 363 if ui._colormode == 'debug':
362 364 if label and msg:
363 365 if msg[-1] == '\n':
364 366 msg = "[%s|%s]\n" % (label, msg[:-1])
365 367 else:
366 368 msg = "[%s|%s]" % (label, msg)
367 369 elif ui._colormode is not None:
368 370 effects = []
369 371 for l in label.split():
370 372 s = ui._styles.get(l, '')
371 373 if s:
372 374 effects.append(s)
373 375 elif valideffect(ui, l):
374 376 effects.append(l)
375 377 effects = ' '.join(effects)
376 378 if effects:
377 379 msg = '\n'.join([_render_effects(ui, line, effects)
378 380 for line in msg.split('\n')])
379 381 return msg
380 382
381 383 w32effects = None
382 384 if pycompat.iswindows:
383 385 import ctypes
384 386
385 387 _kernel32 = ctypes.windll.kernel32
386 388
387 389 _WORD = ctypes.c_ushort
388 390
389 391 _INVALID_HANDLE_VALUE = -1
390 392
391 393 class _COORD(ctypes.Structure):
392 394 _fields_ = [('X', ctypes.c_short),
393 395 ('Y', ctypes.c_short)]
394 396
395 397 class _SMALL_RECT(ctypes.Structure):
396 398 _fields_ = [('Left', ctypes.c_short),
397 399 ('Top', ctypes.c_short),
398 400 ('Right', ctypes.c_short),
399 401 ('Bottom', ctypes.c_short)]
400 402
401 403 class _CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
402 404 _fields_ = [('dwSize', _COORD),
403 405 ('dwCursorPosition', _COORD),
404 406 ('wAttributes', _WORD),
405 407 ('srWindow', _SMALL_RECT),
406 408 ('dwMaximumWindowSize', _COORD)]
407 409
408 410 _STD_OUTPUT_HANDLE = 0xfffffff5 # (DWORD)-11
409 411 _STD_ERROR_HANDLE = 0xfffffff4 # (DWORD)-12
410 412
411 413 _FOREGROUND_BLUE = 0x0001
412 414 _FOREGROUND_GREEN = 0x0002
413 415 _FOREGROUND_RED = 0x0004
414 416 _FOREGROUND_INTENSITY = 0x0008
415 417
416 418 _BACKGROUND_BLUE = 0x0010
417 419 _BACKGROUND_GREEN = 0x0020
418 420 _BACKGROUND_RED = 0x0040
419 421 _BACKGROUND_INTENSITY = 0x0080
420 422
421 423 _COMMON_LVB_REVERSE_VIDEO = 0x4000
422 424 _COMMON_LVB_UNDERSCORE = 0x8000
423 425
424 426 # http://msdn.microsoft.com/en-us/library/ms682088%28VS.85%29.aspx
425 427 w32effects = {
426 428 'none': -1,
427 429 'black': 0,
428 430 'red': _FOREGROUND_RED,
429 431 'green': _FOREGROUND_GREEN,
430 432 'yellow': _FOREGROUND_RED | _FOREGROUND_GREEN,
431 433 'blue': _FOREGROUND_BLUE,
432 434 'magenta': _FOREGROUND_BLUE | _FOREGROUND_RED,
433 435 'cyan': _FOREGROUND_BLUE | _FOREGROUND_GREEN,
434 436 'white': _FOREGROUND_RED | _FOREGROUND_GREEN | _FOREGROUND_BLUE,
435 437 'bold': _FOREGROUND_INTENSITY,
436 438 'black_background': 0x100, # unused value > 0x0f
437 439 'red_background': _BACKGROUND_RED,
438 440 'green_background': _BACKGROUND_GREEN,
439 441 'yellow_background': _BACKGROUND_RED | _BACKGROUND_GREEN,
440 442 'blue_background': _BACKGROUND_BLUE,
441 443 'purple_background': _BACKGROUND_BLUE | _BACKGROUND_RED,
442 444 'cyan_background': _BACKGROUND_BLUE | _BACKGROUND_GREEN,
443 445 'white_background': (_BACKGROUND_RED | _BACKGROUND_GREEN |
444 446 _BACKGROUND_BLUE),
445 447 'bold_background': _BACKGROUND_INTENSITY,
446 448 'underline': _COMMON_LVB_UNDERSCORE, # double-byte charsets only
447 449 'inverse': _COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only
448 450 }
449 451
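# attributes that mapcolor() below simply ORs into the current console
# attribute instead of replacing its color bits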
450 452 passthrough = {_FOREGROUND_INTENSITY,
451 453 _BACKGROUND_INTENSITY,
452 454 _COMMON_LVB_UNDERSCORE,
453 455 _COMMON_LVB_REVERSE_VIDEO}
454 456
455 457 stdout = _kernel32.GetStdHandle(
456 458 _STD_OUTPUT_HANDLE) # don't close the handle returned
457 459 if stdout is None or stdout == _INVALID_HANDLE_VALUE:
458 460 w32effects = None
459 461 else:
460 462 csbi = _CONSOLE_SCREEN_BUFFER_INFO()
461 463 if not _kernel32.GetConsoleScreenBufferInfo(
462 464 stdout, ctypes.byref(csbi)):
463 465 # stdout may not support GetConsoleScreenBufferInfo()
464 466 # when called from subprocess or redirected
465 467 w32effects = None
466 468 else:
467 469 origattr = csbi.wAttributes
468 470 ansire = re.compile('\033\[([^m]*)m([^\033]*)(.*)',
469 471 re.MULTILINE | re.DOTALL)
470 472
471 473 def win32print(ui, writefunc, *msgs, **opts):
472 474 for text in msgs:
473 475 _win32print(ui, text, writefunc, **opts)
474 476
475 477 def _win32print(ui, text, writefunc, **opts):
476 478 label = opts.get('label', '')
477 479 attr = origattr
478 480
479 481 def mapcolor(val, attr):
480 482 if val == -1:
481 483 return origattr
482 484 elif val in passthrough:
483 485 return attr | val
484 486 elif val > 0x0f:
485 487 return (val & 0x70) | (attr & 0x8f)
486 488 else:
487 489 return (val & 0x07) | (attr & 0xf8)
488 490
489 491 # determine console attributes based on labels
490 492 for l in label.split():
491 493 style = ui._styles.get(l, '')
492 494 for effect in style.split():
493 495 try:
494 496 attr = mapcolor(w32effects[effect], attr)
495 497 except KeyError:
496 498 # w32effects may not have certain attributes, so we skip
497 499 # them if not found
498 500 pass
499 501 # hack to ensure regexp finds data
500 502 if not text.startswith('\033['):
501 503 text = '\033[m' + text
502 504
503 505 # Look for ANSI-like codes embedded in text
504 506 m = re.match(ansire, text)
505 507
506 508 try:
507 509 while m:
508 510 for sattr in m.group(1).split(';'):
509 511 if sattr:
510 512 attr = mapcolor(int(sattr), attr)
511 513 ui.flush()
512 514 _kernel32.SetConsoleTextAttribute(stdout, attr)
513 515 writefunc(m.group(2), **opts)
514 516 m = re.match(ansire, m.group(3))
515 517 finally:
516 518 # Explicitly reset original attributes
517 519 ui.flush()
518 520 _kernel32.SetConsoleTextAttribute(stdout, origattr)
@@ -1,1271 +1,1274 b''
1 1 # configitems.py - centralized declaration of configuration option
2 2 #
3 3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import functools
11 11 import re
12 12
13 13 from . import (
14 14 encoding,
15 15 error,
16 16 )
17 17
18 18 def loadconfigtable(ui, extname, configtable):
19 19 """update config item known to the ui with the extension ones"""
20 20 for section, items in configtable.items():
21 21 knownitems = ui._knownconfig.setdefault(section, itemregister())
22 22 knownkeys = set(knownitems)
23 23 newkeys = set(items)
24 24 for key in sorted(knownkeys & newkeys):
25 25 msg = "extension '%s' overwrite config item '%s.%s'"
26 26 msg %= (extname, section, key)
27 27 ui.develwarn(msg, config='warn-config')
28 28
29 29 knownitems.update(items)
30 30
31 31 class configitem(object):
32 32 """represent a known config item
33 33
34 34 :section: the official config section where to find this item,
35 35 :name: the official name within the section,
36 36 :default: default value for this item,
37 37 :alias: optional list of tuples as alternatives,
38 38 :generic: this is a generic definition, match name using regular expression.
39 39 """
40 40
41 41 def __init__(self, section, name, default=None, alias=(),
42 42 generic=False, priority=0):
43 43 self.section = section
44 44 self.name = name
45 45 self.default = default
46 46 self.alias = list(alias)
47 47 self.generic = generic
48 48 self.priority = priority
49 49 self._re = None
50 50 if generic:
51 51 self._re = re.compile(self.name)
52 52
53 53 class itemregister(dict):
54 54 """A specialized dictionary that can handle wild-card selection"""
55 55
56 56 def __init__(self):
57 57 super(itemregister, self).__init__()
58 58 self._generics = set()
59 59
60 60 def update(self, other):
61 61 super(itemregister, self).update(other)
62 62 self._generics.update(other._generics)
63 63
64 64 def __setitem__(self, key, item):
65 65 super(itemregister, self).__setitem__(key, item)
66 66 if item.generic:
67 67 self._generics.add(item)
68 68
69 69 def get(self, key):
70 70 baseitem = super(itemregister, self).get(key)
71 71 if baseitem is not None and not baseitem.generic:
72 72 return baseitem
73 73
74 74 # search for a matching generic item
75 75 generics = sorted(self._generics, key=(lambda x: (x.priority, x.name)))
76 76 for item in generics:
77 77 # we use 'match' instead of 'search' to make the matching simpler
78 78 # for people unfamiliar with regular expressions. Having the match
79 79 # rooted to the start of the string will produce less surprising
80 80 # results for users writing simple regexes for sub-attributes.
81 81 #
82 82 # For example using "color\..*" match produces an unsurprising
83 83 # result, while using search could suddenly match apparently
84 84 # unrelated configuration that happens to contains "color."
85 85 # anywhere. This is a tradeoff where we favor requiring ".*" on
86 86 # some match to avoid the need to prefix most pattern with "^".
87 87 # The "^" seems more error prone.
88 88 if item._re.match(key):
89 89 return item
90 90
91 91 return None
92 92
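A small standalone illustration of the match-versus-search trade-off described in the comment above (added here for clarity; the config names are invented):

    import re

    pat = re.compile(br'color\..*')
    # match() is anchored at the start of the name, so only keys that begin
    # with "color." are selected ...
    assert pat.match(b'color.diff.inserted')
    assert not pat.match(b'ui.color.mode')
    # ... whereas search() would also hit names that merely contain "color."
    assert pat.search(b'ui.color.mode')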
93 93 coreitems = {}
94 94
95 95 def _register(configtable, *args, **kwargs):
96 96 item = configitem(*args, **kwargs)
97 97 section = configtable.setdefault(item.section, itemregister())
98 98 if item.name in section:
99 99 msg = "duplicated config item registration for '%s.%s'"
100 100 raise error.ProgrammingError(msg % (item.section, item.name))
101 101 section[item.name] = item
102 102
103 103 # special value for case where the default is derived from other values
104 104 dynamicdefault = object()
105 105
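Since dynamicdefault is compared by identity, consumers can tell "no static default" apart from a legitimate None default. A hedged sketch of that pattern (the helper name is hypothetical):

    def resolvedefault(item, computefallback):
        # identity check against the sentinel object registered above
        if item.default is dynamicdefault:
            return computefallback()
        return item.default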
106 106 # Registering actual config items
107 107
108 108 def getitemregister(configtable):
109 109 f = functools.partial(_register, configtable)
110 110 # export pseudo enum as configitem.*
111 111 f.dynamicdefault = dynamicdefault
112 112 return f
113 113
114 114 coreconfigitem = getitemregister(coreitems)
115 115
116 116 coreconfigitem('alias', '.*',
117 117 default=None,
118 118 generic=True,
119 119 )
120 120 coreconfigitem('annotate', 'nodates',
121 121 default=False,
122 122 )
123 123 coreconfigitem('annotate', 'showfunc',
124 124 default=False,
125 125 )
126 126 coreconfigitem('annotate', 'unified',
127 127 default=None,
128 128 )
129 129 coreconfigitem('annotate', 'git',
130 130 default=False,
131 131 )
132 132 coreconfigitem('annotate', 'ignorews',
133 133 default=False,
134 134 )
135 135 coreconfigitem('annotate', 'ignorewsamount',
136 136 default=False,
137 137 )
138 138 coreconfigitem('annotate', 'ignoreblanklines',
139 139 default=False,
140 140 )
141 141 coreconfigitem('annotate', 'ignorewseol',
142 142 default=False,
143 143 )
144 144 coreconfigitem('annotate', 'nobinary',
145 145 default=False,
146 146 )
147 147 coreconfigitem('annotate', 'noprefix',
148 148 default=False,
149 149 )
150 150 coreconfigitem('auth', 'cookiefile',
151 151 default=None,
152 152 )
153 153 # bookmarks.pushing: internal hack for discovery
154 154 coreconfigitem('bookmarks', 'pushing',
155 155 default=list,
156 156 )
157 157 # bundle.mainreporoot: internal hack for bundlerepo
158 158 coreconfigitem('bundle', 'mainreporoot',
159 159 default='',
160 160 )
161 161 # bundle.reorder: experimental config
162 162 coreconfigitem('bundle', 'reorder',
163 163 default='auto',
164 164 )
165 165 coreconfigitem('censor', 'policy',
166 166 default='abort',
167 167 )
168 168 coreconfigitem('chgserver', 'idletimeout',
169 169 default=3600,
170 170 )
171 171 coreconfigitem('chgserver', 'skiphash',
172 172 default=False,
173 173 )
174 174 coreconfigitem('cmdserver', 'log',
175 175 default=None,
176 176 )
177 177 coreconfigitem('color', '.*',
178 178 default=None,
179 179 generic=True,
180 180 )
181 181 coreconfigitem('color', 'mode',
182 182 default='auto',
183 183 )
184 184 coreconfigitem('color', 'pagermode',
185 185 default=dynamicdefault,
186 186 )
187 187 coreconfigitem('commands', 'show.aliasprefix',
188 188 default=list,
189 189 )
190 190 coreconfigitem('commands', 'status.relative',
191 191 default=False,
192 192 )
193 193 coreconfigitem('commands', 'status.skipstates',
194 194 default=[],
195 195 )
196 196 coreconfigitem('commands', 'status.verbose',
197 197 default=False,
198 198 )
199 199 coreconfigitem('commands', 'update.check',
200 200 default=None,
201 201 # Deprecated, remove after 4.4 release
202 202 alias=[('experimental', 'updatecheck')]
203 203 )
204 204 coreconfigitem('commands', 'update.requiredest',
205 205 default=False,
206 206 )
207 207 coreconfigitem('committemplate', '.*',
208 208 default=None,
209 209 generic=True,
210 210 )
211 211 coreconfigitem('convert', 'cvsps.cache',
212 212 default=True,
213 213 )
214 214 coreconfigitem('convert', 'cvsps.fuzz',
215 215 default=60,
216 216 )
217 217 coreconfigitem('convert', 'cvsps.logencoding',
218 218 default=None,
219 219 )
220 220 coreconfigitem('convert', 'cvsps.mergefrom',
221 221 default=None,
222 222 )
223 223 coreconfigitem('convert', 'cvsps.mergeto',
224 224 default=None,
225 225 )
226 226 coreconfigitem('convert', 'git.committeractions',
227 227 default=lambda: ['messagedifferent'],
228 228 )
229 229 coreconfigitem('convert', 'git.extrakeys',
230 230 default=list,
231 231 )
232 232 coreconfigitem('convert', 'git.findcopiesharder',
233 233 default=False,
234 234 )
235 235 coreconfigitem('convert', 'git.remoteprefix',
236 236 default='remote',
237 237 )
238 238 coreconfigitem('convert', 'git.renamelimit',
239 239 default=400,
240 240 )
241 241 coreconfigitem('convert', 'git.saverev',
242 242 default=True,
243 243 )
244 244 coreconfigitem('convert', 'git.similarity',
245 245 default=50,
246 246 )
247 247 coreconfigitem('convert', 'git.skipsubmodules',
248 248 default=False,
249 249 )
250 250 coreconfigitem('convert', 'hg.clonebranches',
251 251 default=False,
252 252 )
253 253 coreconfigitem('convert', 'hg.ignoreerrors',
254 254 default=False,
255 255 )
256 256 coreconfigitem('convert', 'hg.revs',
257 257 default=None,
258 258 )
259 259 coreconfigitem('convert', 'hg.saverev',
260 260 default=False,
261 261 )
262 262 coreconfigitem('convert', 'hg.sourcename',
263 263 default=None,
264 264 )
265 265 coreconfigitem('convert', 'hg.startrev',
266 266 default=None,
267 267 )
268 268 coreconfigitem('convert', 'hg.tagsbranch',
269 269 default='default',
270 270 )
271 271 coreconfigitem('convert', 'hg.usebranchnames',
272 272 default=True,
273 273 )
274 274 coreconfigitem('convert', 'ignoreancestorcheck',
275 275 default=False,
276 276 )
277 277 coreconfigitem('convert', 'localtimezone',
278 278 default=False,
279 279 )
280 280 coreconfigitem('convert', 'p4.encoding',
281 281 default=dynamicdefault,
282 282 )
283 283 coreconfigitem('convert', 'p4.startrev',
284 284 default=0,
285 285 )
286 286 coreconfigitem('convert', 'skiptags',
287 287 default=False,
288 288 )
289 289 coreconfigitem('convert', 'svn.debugsvnlog',
290 290 default=True,
291 291 )
292 292 coreconfigitem('convert', 'svn.trunk',
293 293 default=None,
294 294 )
295 295 coreconfigitem('convert', 'svn.tags',
296 296 default=None,
297 297 )
298 298 coreconfigitem('convert', 'svn.branches',
299 299 default=None,
300 300 )
301 301 coreconfigitem('convert', 'svn.startrev',
302 302 default=0,
303 303 )
304 304 coreconfigitem('debug', 'dirstate.delaywrite',
305 305 default=0,
306 306 )
307 307 coreconfigitem('defaults', '.*',
308 308 default=None,
309 309 generic=True,
310 310 )
311 311 coreconfigitem('devel', 'all-warnings',
312 312 default=False,
313 313 )
314 314 coreconfigitem('devel', 'bundle2.debug',
315 315 default=False,
316 316 )
317 317 coreconfigitem('devel', 'cache-vfs',
318 318 default=None,
319 319 )
320 320 coreconfigitem('devel', 'check-locks',
321 321 default=False,
322 322 )
323 323 coreconfigitem('devel', 'check-relroot',
324 324 default=False,
325 325 )
326 326 coreconfigitem('devel', 'default-date',
327 327 default=None,
328 328 )
329 329 coreconfigitem('devel', 'deprec-warn',
330 330 default=False,
331 331 )
332 332 coreconfigitem('devel', 'disableloaddefaultcerts',
333 333 default=False,
334 334 )
335 335 coreconfigitem('devel', 'warn-empty-changegroup',
336 336 default=False,
337 337 )
338 338 coreconfigitem('devel', 'legacy.exchange',
339 339 default=list,
340 340 )
341 341 coreconfigitem('devel', 'servercafile',
342 342 default='',
343 343 )
344 344 coreconfigitem('devel', 'serverexactprotocol',
345 345 default='',
346 346 )
347 347 coreconfigitem('devel', 'serverrequirecert',
348 348 default=False,
349 349 )
350 350 coreconfigitem('devel', 'strip-obsmarkers',
351 351 default=True,
352 352 )
353 353 coreconfigitem('devel', 'warn-config',
354 354 default=None,
355 355 )
356 356 coreconfigitem('devel', 'warn-config-default',
357 357 default=None,
358 358 )
359 359 coreconfigitem('devel', 'user.obsmarker',
360 360 default=None,
361 361 )
362 362 coreconfigitem('devel', 'warn-config-unknown',
363 363 default=None,
364 364 )
365 365 coreconfigitem('diff', 'nodates',
366 366 default=False,
367 367 )
368 368 coreconfigitem('diff', 'showfunc',
369 369 default=False,
370 370 )
371 371 coreconfigitem('diff', 'unified',
372 372 default=None,
373 373 )
374 374 coreconfigitem('diff', 'git',
375 375 default=False,
376 376 )
377 377 coreconfigitem('diff', 'ignorews',
378 378 default=False,
379 379 )
380 380 coreconfigitem('diff', 'ignorewsamount',
381 381 default=False,
382 382 )
383 383 coreconfigitem('diff', 'ignoreblanklines',
384 384 default=False,
385 385 )
386 386 coreconfigitem('diff', 'ignorewseol',
387 387 default=False,
388 388 )
389 389 coreconfigitem('diff', 'nobinary',
390 390 default=False,
391 391 )
392 392 coreconfigitem('diff', 'noprefix',
393 393 default=False,
394 394 )
395 395 coreconfigitem('email', 'bcc',
396 396 default=None,
397 397 )
398 398 coreconfigitem('email', 'cc',
399 399 default=None,
400 400 )
401 401 coreconfigitem('email', 'charsets',
402 402 default=list,
403 403 )
404 404 coreconfigitem('email', 'from',
405 405 default=None,
406 406 )
407 407 coreconfigitem('email', 'method',
408 408 default='smtp',
409 409 )
410 410 coreconfigitem('email', 'reply-to',
411 411 default=None,
412 412 )
413 413 coreconfigitem('email', 'to',
414 414 default=None,
415 415 )
416 416 coreconfigitem('experimental', 'archivemetatemplate',
417 417 default=dynamicdefault,
418 418 )
419 419 coreconfigitem('experimental', 'bundle-phases',
420 420 default=False,
421 421 )
422 422 coreconfigitem('experimental', 'bundle2-advertise',
423 423 default=True,
424 424 )
425 425 coreconfigitem('experimental', 'bundle2-output-capture',
426 426 default=False,
427 427 )
428 428 coreconfigitem('experimental', 'bundle2.pushback',
429 429 default=False,
430 430 )
431 431 coreconfigitem('experimental', 'bundle2lazylocking',
432 432 default=False,
433 433 )
434 434 coreconfigitem('experimental', 'bundlecomplevel',
435 435 default=None,
436 436 )
437 437 coreconfigitem('experimental', 'changegroup3',
438 438 default=False,
439 439 )
440 440 coreconfigitem('experimental', 'clientcompressionengines',
441 441 default=list,
442 442 )
443 443 coreconfigitem('experimental', 'copytrace',
444 444 default='on',
445 445 )
446 446 coreconfigitem('experimental', 'copytrace.movecandidateslimit',
447 447 default=100,
448 448 )
449 449 coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
450 450 default=100,
451 451 )
452 452 coreconfigitem('experimental', 'crecordtest',
453 453 default=None,
454 454 )
455 455 coreconfigitem('experimental', 'editortmpinhg',
456 456 default=False,
457 457 )
458 458 coreconfigitem('experimental', 'evolution',
459 459 default=list,
460 460 )
461 461 coreconfigitem('experimental', 'evolution.allowdivergence',
462 462 default=False,
463 463 alias=[('experimental', 'allowdivergence')]
464 464 )
465 465 coreconfigitem('experimental', 'evolution.allowunstable',
466 466 default=None,
467 467 )
468 468 coreconfigitem('experimental', 'evolution.createmarkers',
469 469 default=None,
470 470 )
471 471 coreconfigitem('experimental', 'evolution.effect-flags',
472 472 default=True,
473 473 alias=[('experimental', 'effect-flags')]
474 474 )
475 475 coreconfigitem('experimental', 'evolution.exchange',
476 476 default=None,
477 477 )
478 478 coreconfigitem('experimental', 'evolution.bundle-obsmarker',
479 479 default=False,
480 480 )
481 481 coreconfigitem('experimental', 'evolution.track-operation',
482 482 default=True,
483 483 )
484 coreconfigitem('experimental', 'worddiff',
485 default=False,
486 )
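experimental.worddiff is the knob introduced by this patch; it feeds the 'worddiff' diffopts field added in mdiff.py further down. A hedged sketch of how a caller might read it (ui.configbool is the standard boolean accessor; the surrounding logic is illustrative only):

    # Sketch only: consult the new option before computing within-line
    # (word-level) highlighting.
    if ui.configbool('experimental', 'worddiff'):
        highlightinline = True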
484 487 coreconfigitem('experimental', 'maxdeltachainspan',
485 488 default=-1,
486 489 )
487 490 coreconfigitem('experimental', 'mmapindexthreshold',
488 491 default=None,
489 492 )
490 493 coreconfigitem('experimental', 'nonnormalparanoidcheck',
491 494 default=False,
492 495 )
493 496 coreconfigitem('experimental', 'exportableenviron',
494 497 default=list,
495 498 )
496 499 coreconfigitem('experimental', 'extendedheader.index',
497 500 default=None,
498 501 )
499 502 coreconfigitem('experimental', 'extendedheader.similarity',
500 503 default=False,
501 504 )
502 505 coreconfigitem('experimental', 'format.compression',
503 506 default='zlib',
504 507 )
505 508 coreconfigitem('experimental', 'graphshorten',
506 509 default=False,
507 510 )
508 511 coreconfigitem('experimental', 'graphstyle.parent',
509 512 default=dynamicdefault,
510 513 )
511 514 coreconfigitem('experimental', 'graphstyle.missing',
512 515 default=dynamicdefault,
513 516 )
514 517 coreconfigitem('experimental', 'graphstyle.grandparent',
515 518 default=dynamicdefault,
516 519 )
517 520 coreconfigitem('experimental', 'hook-track-tags',
518 521 default=False,
519 522 )
520 523 coreconfigitem('experimental', 'httppostargs',
521 524 default=False,
522 525 )
523 526 coreconfigitem('experimental', 'manifestv2',
524 527 default=False,
525 528 )
526 529 coreconfigitem('experimental', 'mergedriver',
527 530 default=None,
528 531 )
529 532 coreconfigitem('experimental', 'obsmarkers-exchange-debug',
530 533 default=False,
531 534 )
532 535 coreconfigitem('experimental', 'rebase.multidest',
533 536 default=False,
534 537 )
535 538 coreconfigitem('experimental', 'remotenames',
536 539 default=False,
537 540 )
538 541 coreconfigitem('experimental', 'revlogv2',
539 542 default=None,
540 543 )
541 544 coreconfigitem('experimental', 'single-head-per-branch',
542 545 default=False,
543 546 )
544 547 coreconfigitem('experimental', 'spacemovesdown',
545 548 default=False,
546 549 )
547 550 coreconfigitem('experimental', 'sparse-read',
548 551 default=False,
549 552 )
550 553 coreconfigitem('experimental', 'sparse-read.density-threshold',
551 554 default=0.25,
552 555 )
553 556 coreconfigitem('experimental', 'sparse-read.min-gap-size',
554 557 default='256K',
555 558 )
556 559 coreconfigitem('experimental', 'treemanifest',
557 560 default=False,
558 561 )
559 562 coreconfigitem('extensions', '.*',
560 563 default=None,
561 564 generic=True,
562 565 )
563 566 coreconfigitem('extdata', '.*',
564 567 default=None,
565 568 generic=True,
566 569 )
567 570 coreconfigitem('format', 'aggressivemergedeltas',
568 571 default=False,
569 572 )
570 573 coreconfigitem('format', 'chunkcachesize',
571 574 default=None,
572 575 )
573 576 coreconfigitem('format', 'dotencode',
574 577 default=True,
575 578 )
576 579 coreconfigitem('format', 'generaldelta',
577 580 default=False,
578 581 )
579 582 coreconfigitem('format', 'manifestcachesize',
580 583 default=None,
581 584 )
582 585 coreconfigitem('format', 'maxchainlen',
583 586 default=None,
584 587 )
585 588 coreconfigitem('format', 'obsstore-version',
586 589 default=None,
587 590 )
588 591 coreconfigitem('format', 'usefncache',
589 592 default=True,
590 593 )
591 594 coreconfigitem('format', 'usegeneraldelta',
592 595 default=True,
593 596 )
594 597 coreconfigitem('format', 'usestore',
595 598 default=True,
596 599 )
597 600 coreconfigitem('fsmonitor', 'warn_when_unused',
598 601 default=True,
599 602 )
600 603 coreconfigitem('fsmonitor', 'warn_update_file_count',
601 604 default=50000,
602 605 )
603 606 coreconfigitem('hooks', '.*',
604 607 default=dynamicdefault,
605 608 generic=True,
606 609 )
607 610 coreconfigitem('hgweb-paths', '.*',
608 611 default=list,
609 612 generic=True,
610 613 )
611 614 coreconfigitem('hostfingerprints', '.*',
612 615 default=list,
613 616 generic=True,
614 617 )
615 618 coreconfigitem('hostsecurity', 'ciphers',
616 619 default=None,
617 620 )
618 621 coreconfigitem('hostsecurity', 'disabletls10warning',
619 622 default=False,
620 623 )
621 624 coreconfigitem('hostsecurity', 'minimumprotocol',
622 625 default=dynamicdefault,
623 626 )
624 627 coreconfigitem('hostsecurity', '.*:minimumprotocol$',
625 628 default=dynamicdefault,
626 629 generic=True,
627 630 )
628 631 coreconfigitem('hostsecurity', '.*:ciphers$',
629 632 default=dynamicdefault,
630 633 generic=True,
631 634 )
632 635 coreconfigitem('hostsecurity', '.*:fingerprints$',
633 636 default=list,
634 637 generic=True,
635 638 )
636 639 coreconfigitem('hostsecurity', '.*:verifycertsfile$',
637 640 default=None,
638 641 generic=True,
639 642 )
640 643
641 644 coreconfigitem('http_proxy', 'always',
642 645 default=False,
643 646 )
644 647 coreconfigitem('http_proxy', 'host',
645 648 default=None,
646 649 )
647 650 coreconfigitem('http_proxy', 'no',
648 651 default=list,
649 652 )
650 653 coreconfigitem('http_proxy', 'passwd',
651 654 default=None,
652 655 )
653 656 coreconfigitem('http_proxy', 'user',
654 657 default=None,
655 658 )
656 659 coreconfigitem('logtoprocess', 'commandexception',
657 660 default=None,
658 661 )
659 662 coreconfigitem('logtoprocess', 'commandfinish',
660 663 default=None,
661 664 )
662 665 coreconfigitem('logtoprocess', 'command',
663 666 default=None,
664 667 )
665 668 coreconfigitem('logtoprocess', 'develwarn',
666 669 default=None,
667 670 )
668 671 coreconfigitem('logtoprocess', 'uiblocked',
669 672 default=None,
670 673 )
671 674 coreconfigitem('merge', 'checkunknown',
672 675 default='abort',
673 676 )
674 677 coreconfigitem('merge', 'checkignored',
675 678 default='abort',
676 679 )
677 680 coreconfigitem('experimental', 'merge.checkpathconflicts',
678 681 default=False,
679 682 )
680 683 coreconfigitem('merge', 'followcopies',
681 684 default=True,
682 685 )
683 686 coreconfigitem('merge', 'on-failure',
684 687 default='continue',
685 688 )
686 689 coreconfigitem('merge', 'preferancestor',
687 690 default=lambda: ['*'],
688 691 )
689 692 coreconfigitem('merge-tools', '.*',
690 693 default=None,
691 694 generic=True,
692 695 )
693 696 coreconfigitem('merge-tools', br'.*\.args$',
694 697 default="$local $base $other",
695 698 generic=True,
696 699 priority=-1,
697 700 )
698 701 coreconfigitem('merge-tools', br'.*\.binary$',
699 702 default=False,
700 703 generic=True,
701 704 priority=-1,
702 705 )
703 706 coreconfigitem('merge-tools', br'.*\.check$',
704 707 default=list,
705 708 generic=True,
706 709 priority=-1,
707 710 )
708 711 coreconfigitem('merge-tools', br'.*\.checkchanged$',
709 712 default=False,
710 713 generic=True,
711 714 priority=-1,
712 715 )
713 716 coreconfigitem('merge-tools', br'.*\.executable$',
714 717 default=dynamicdefault,
715 718 generic=True,
716 719 priority=-1,
717 720 )
718 721 coreconfigitem('merge-tools', br'.*\.fixeol$',
719 722 default=False,
720 723 generic=True,
721 724 priority=-1,
722 725 )
723 726 coreconfigitem('merge-tools', br'.*\.gui$',
724 727 default=False,
725 728 generic=True,
726 729 priority=-1,
727 730 )
728 731 coreconfigitem('merge-tools', br'.*\.priority$',
729 732 default=0,
730 733 generic=True,
731 734 priority=-1,
732 735 )
733 736 coreconfigitem('merge-tools', br'.*\.premerge$',
734 737 default=dynamicdefault,
735 738 generic=True,
736 739 priority=-1,
737 740 )
738 741 coreconfigitem('merge-tools', br'.*\.symlink$',
739 742 default=False,
740 743 generic=True,
741 744 priority=-1,
742 745 )
743 746 coreconfigitem('pager', 'attend-.*',
744 747 default=dynamicdefault,
745 748 generic=True,
746 749 )
747 750 coreconfigitem('pager', 'ignore',
748 751 default=list,
749 752 )
750 753 coreconfigitem('pager', 'pager',
751 754 default=dynamicdefault,
752 755 )
753 756 coreconfigitem('patch', 'eol',
754 757 default='strict',
755 758 )
756 759 coreconfigitem('patch', 'fuzz',
757 760 default=2,
758 761 )
759 762 coreconfigitem('paths', 'default',
760 763 default=None,
761 764 )
762 765 coreconfigitem('paths', 'default-push',
763 766 default=None,
764 767 )
765 768 coreconfigitem('paths', '.*',
766 769 default=None,
767 770 generic=True,
768 771 )
769 772 coreconfigitem('phases', 'checksubrepos',
770 773 default='follow',
771 774 )
772 775 coreconfigitem('phases', 'new-commit',
773 776 default='draft',
774 777 )
775 778 coreconfigitem('phases', 'publish',
776 779 default=True,
777 780 )
778 781 coreconfigitem('profiling', 'enabled',
779 782 default=False,
780 783 )
781 784 coreconfigitem('profiling', 'format',
782 785 default='text',
783 786 )
784 787 coreconfigitem('profiling', 'freq',
785 788 default=1000,
786 789 )
787 790 coreconfigitem('profiling', 'limit',
788 791 default=30,
789 792 )
790 793 coreconfigitem('profiling', 'nested',
791 794 default=0,
792 795 )
793 796 coreconfigitem('profiling', 'output',
794 797 default=None,
795 798 )
796 799 coreconfigitem('profiling', 'showmax',
797 800 default=0.999,
798 801 )
799 802 coreconfigitem('profiling', 'showmin',
800 803 default=dynamicdefault,
801 804 )
802 805 coreconfigitem('profiling', 'sort',
803 806 default='inlinetime',
804 807 )
805 808 coreconfigitem('profiling', 'statformat',
806 809 default='hotpath',
807 810 )
808 811 coreconfigitem('profiling', 'type',
809 812 default='stat',
810 813 )
811 814 coreconfigitem('progress', 'assume-tty',
812 815 default=False,
813 816 )
814 817 coreconfigitem('progress', 'changedelay',
815 818 default=1,
816 819 )
817 820 coreconfigitem('progress', 'clear-complete',
818 821 default=True,
819 822 )
820 823 coreconfigitem('progress', 'debug',
821 824 default=False,
822 825 )
823 826 coreconfigitem('progress', 'delay',
824 827 default=3,
825 828 )
826 829 coreconfigitem('progress', 'disable',
827 830 default=False,
828 831 )
829 832 coreconfigitem('progress', 'estimateinterval',
830 833 default=60.0,
831 834 )
832 835 coreconfigitem('progress', 'format',
833 836 default=lambda: ['topic', 'bar', 'number', 'estimate'],
834 837 )
835 838 coreconfigitem('progress', 'refresh',
836 839 default=0.1,
837 840 )
838 841 coreconfigitem('progress', 'width',
839 842 default=dynamicdefault,
840 843 )
841 844 coreconfigitem('push', 'pushvars.server',
842 845 default=False,
843 846 )
844 847 coreconfigitem('server', 'bookmarks-pushkey-compat',
845 848 default=True,
846 849 )
847 850 coreconfigitem('server', 'bundle1',
848 851 default=True,
849 852 )
850 853 coreconfigitem('server', 'bundle1gd',
851 854 default=None,
852 855 )
853 856 coreconfigitem('server', 'bundle1.pull',
854 857 default=None,
855 858 )
856 859 coreconfigitem('server', 'bundle1gd.pull',
857 860 default=None,
858 861 )
859 862 coreconfigitem('server', 'bundle1.push',
860 863 default=None,
861 864 )
862 865 coreconfigitem('server', 'bundle1gd.push',
863 866 default=None,
864 867 )
865 868 coreconfigitem('server', 'compressionengines',
866 869 default=list,
867 870 )
868 871 coreconfigitem('server', 'concurrent-push-mode',
869 872 default='strict',
870 873 )
871 874 coreconfigitem('server', 'disablefullbundle',
872 875 default=False,
873 876 )
874 877 coreconfigitem('server', 'maxhttpheaderlen',
875 878 default=1024,
876 879 )
877 880 coreconfigitem('server', 'preferuncompressed',
878 881 default=False,
879 882 )
880 883 coreconfigitem('server', 'uncompressed',
881 884 default=True,
882 885 )
883 886 coreconfigitem('server', 'uncompressedallowsecret',
884 887 default=False,
885 888 )
886 889 coreconfigitem('server', 'validate',
887 890 default=False,
888 891 )
889 892 coreconfigitem('server', 'zliblevel',
890 893 default=-1,
891 894 )
892 895 coreconfigitem('share', 'pool',
893 896 default=None,
894 897 )
895 898 coreconfigitem('share', 'poolnaming',
896 899 default='identity',
897 900 )
898 901 coreconfigitem('smtp', 'host',
899 902 default=None,
900 903 )
901 904 coreconfigitem('smtp', 'local_hostname',
902 905 default=None,
903 906 )
904 907 coreconfigitem('smtp', 'password',
905 908 default=None,
906 909 )
907 910 coreconfigitem('smtp', 'port',
908 911 default=dynamicdefault,
909 912 )
910 913 coreconfigitem('smtp', 'tls',
911 914 default='none',
912 915 )
913 916 coreconfigitem('smtp', 'username',
914 917 default=None,
915 918 )
916 919 coreconfigitem('sparse', 'missingwarning',
917 920 default=True,
918 921 )
919 922 coreconfigitem('subrepos', 'allowed',
920 923 default=dynamicdefault, # to make backporting simpler
921 924 )
922 925 coreconfigitem('subrepos', 'hg:allowed',
923 926 default=dynamicdefault,
924 927 )
925 928 coreconfigitem('subrepos', 'git:allowed',
926 929 default=dynamicdefault,
927 930 )
928 931 coreconfigitem('subrepos', 'svn:allowed',
929 932 default=dynamicdefault,
930 933 )
931 934 coreconfigitem('templates', '.*',
932 935 default=None,
933 936 generic=True,
934 937 )
935 938 coreconfigitem('trusted', 'groups',
936 939 default=list,
937 940 )
938 941 coreconfigitem('trusted', 'users',
939 942 default=list,
940 943 )
941 944 coreconfigitem('ui', '_usedassubrepo',
942 945 default=False,
943 946 )
944 947 coreconfigitem('ui', 'allowemptycommit',
945 948 default=False,
946 949 )
947 950 coreconfigitem('ui', 'archivemeta',
948 951 default=True,
949 952 )
950 953 coreconfigitem('ui', 'askusername',
951 954 default=False,
952 955 )
953 956 coreconfigitem('ui', 'clonebundlefallback',
954 957 default=False,
955 958 )
956 959 coreconfigitem('ui', 'clonebundleprefers',
957 960 default=list,
958 961 )
959 962 coreconfigitem('ui', 'clonebundles',
960 963 default=True,
961 964 )
962 965 coreconfigitem('ui', 'color',
963 966 default='auto',
964 967 )
965 968 coreconfigitem('ui', 'commitsubrepos',
966 969 default=False,
967 970 )
968 971 coreconfigitem('ui', 'debug',
969 972 default=False,
970 973 )
971 974 coreconfigitem('ui', 'debugger',
972 975 default=None,
973 976 )
974 977 coreconfigitem('ui', 'editor',
975 978 default=dynamicdefault,
976 979 )
977 980 coreconfigitem('ui', 'fallbackencoding',
978 981 default=None,
979 982 )
980 983 coreconfigitem('ui', 'forcecwd',
981 984 default=None,
982 985 )
983 986 coreconfigitem('ui', 'forcemerge',
984 987 default=None,
985 988 )
986 989 coreconfigitem('ui', 'formatdebug',
987 990 default=False,
988 991 )
989 992 coreconfigitem('ui', 'formatjson',
990 993 default=False,
991 994 )
992 995 coreconfigitem('ui', 'formatted',
993 996 default=None,
994 997 )
995 998 coreconfigitem('ui', 'graphnodetemplate',
996 999 default=None,
997 1000 )
998 1001 coreconfigitem('ui', 'http2debuglevel',
999 1002 default=None,
1000 1003 )
1001 1004 coreconfigitem('ui', 'interactive',
1002 1005 default=None,
1003 1006 )
1004 1007 coreconfigitem('ui', 'interface',
1005 1008 default=None,
1006 1009 )
1007 1010 coreconfigitem('ui', 'interface.chunkselector',
1008 1011 default=None,
1009 1012 )
1010 1013 coreconfigitem('ui', 'logblockedtimes',
1011 1014 default=False,
1012 1015 )
1013 1016 coreconfigitem('ui', 'logtemplate',
1014 1017 default=None,
1015 1018 )
1016 1019 coreconfigitem('ui', 'merge',
1017 1020 default=None,
1018 1021 )
1019 1022 coreconfigitem('ui', 'mergemarkers',
1020 1023 default='basic',
1021 1024 )
1022 1025 coreconfigitem('ui', 'mergemarkertemplate',
1023 1026 default=('{node|short} '
1024 1027 '{ifeq(tags, "tip", "", '
1025 1028 'ifeq(tags, "", "", "{tags} "))}'
1026 1029 '{if(bookmarks, "{bookmarks} ")}'
1027 1030 '{ifeq(branch, "default", "", "{branch} ")}'
1028 1031 '- {author|user}: {desc|firstline}')
1029 1032 )
1030 1033 coreconfigitem('ui', 'nontty',
1031 1034 default=False,
1032 1035 )
1033 1036 coreconfigitem('ui', 'origbackuppath',
1034 1037 default=None,
1035 1038 )
1036 1039 coreconfigitem('ui', 'paginate',
1037 1040 default=True,
1038 1041 )
1039 1042 coreconfigitem('ui', 'patch',
1040 1043 default=None,
1041 1044 )
1042 1045 coreconfigitem('ui', 'portablefilenames',
1043 1046 default='warn',
1044 1047 )
1045 1048 coreconfigitem('ui', 'promptecho',
1046 1049 default=False,
1047 1050 )
1048 1051 coreconfigitem('ui', 'quiet',
1049 1052 default=False,
1050 1053 )
1051 1054 coreconfigitem('ui', 'quietbookmarkmove',
1052 1055 default=False,
1053 1056 )
1054 1057 coreconfigitem('ui', 'remotecmd',
1055 1058 default='hg',
1056 1059 )
1057 1060 coreconfigitem('ui', 'report_untrusted',
1058 1061 default=True,
1059 1062 )
1060 1063 coreconfigitem('ui', 'rollback',
1061 1064 default=True,
1062 1065 )
1063 1066 coreconfigitem('ui', 'slash',
1064 1067 default=False,
1065 1068 )
1066 1069 coreconfigitem('ui', 'ssh',
1067 1070 default='ssh',
1068 1071 )
1069 1072 coreconfigitem('ui', 'ssherrorhint',
1070 1073 default=None,
1071 1074 )
1072 1075 coreconfigitem('ui', 'statuscopies',
1073 1076 default=False,
1074 1077 )
1075 1078 coreconfigitem('ui', 'strict',
1076 1079 default=False,
1077 1080 )
1078 1081 coreconfigitem('ui', 'style',
1079 1082 default='',
1080 1083 )
1081 1084 coreconfigitem('ui', 'supportcontact',
1082 1085 default=None,
1083 1086 )
1084 1087 coreconfigitem('ui', 'textwidth',
1085 1088 default=78,
1086 1089 )
1087 1090 coreconfigitem('ui', 'timeout',
1088 1091 default='600',
1089 1092 )
1090 1093 coreconfigitem('ui', 'timeout.warn',
1091 1094 default=0,
1092 1095 )
1093 1096 coreconfigitem('ui', 'traceback',
1094 1097 default=False,
1095 1098 )
1096 1099 coreconfigitem('ui', 'tweakdefaults',
1097 1100 default=False,
1098 1101 )
1099 1102 coreconfigitem('ui', 'usehttp2',
1100 1103 default=False,
1101 1104 )
1102 1105 coreconfigitem('ui', 'username',
1103 1106 alias=[('ui', 'user')]
1104 1107 )
1105 1108 coreconfigitem('ui', 'verbose',
1106 1109 default=False,
1107 1110 )
1108 1111 coreconfigitem('verify', 'skipflags',
1109 1112 default=None,
1110 1113 )
1111 1114 coreconfigitem('web', 'allowbz2',
1112 1115 default=False,
1113 1116 )
1114 1117 coreconfigitem('web', 'allowgz',
1115 1118 default=False,
1116 1119 )
1117 1120 coreconfigitem('web', 'allow-pull',
1118 1121 alias=[('web', 'allowpull')],
1119 1122 default=True,
1120 1123 )
1121 1124 coreconfigitem('web', 'allow-push',
1122 1125 alias=[('web', 'allow_push')],
1123 1126 default=list,
1124 1127 )
1125 1128 coreconfigitem('web', 'allowzip',
1126 1129 default=False,
1127 1130 )
1128 1131 coreconfigitem('web', 'archivesubrepos',
1129 1132 default=False,
1130 1133 )
1131 1134 coreconfigitem('web', 'cache',
1132 1135 default=True,
1133 1136 )
1134 1137 coreconfigitem('web', 'contact',
1135 1138 default=None,
1136 1139 )
1137 1140 coreconfigitem('web', 'deny_push',
1138 1141 default=list,
1139 1142 )
1140 1143 coreconfigitem('web', 'guessmime',
1141 1144 default=False,
1142 1145 )
1143 1146 coreconfigitem('web', 'hidden',
1144 1147 default=False,
1145 1148 )
1146 1149 coreconfigitem('web', 'labels',
1147 1150 default=list,
1148 1151 )
1149 1152 coreconfigitem('web', 'logoimg',
1150 1153 default='hglogo.png',
1151 1154 )
1152 1155 coreconfigitem('web', 'logourl',
1153 1156 default='https://mercurial-scm.org/',
1154 1157 )
1155 1158 coreconfigitem('web', 'accesslog',
1156 1159 default='-',
1157 1160 )
1158 1161 coreconfigitem('web', 'address',
1159 1162 default='',
1160 1163 )
1161 1164 coreconfigitem('web', 'allow_archive',
1162 1165 default=list,
1163 1166 )
1164 1167 coreconfigitem('web', 'allow_read',
1165 1168 default=list,
1166 1169 )
1167 1170 coreconfigitem('web', 'baseurl',
1168 1171 default=None,
1169 1172 )
1170 1173 coreconfigitem('web', 'cacerts',
1171 1174 default=None,
1172 1175 )
1173 1176 coreconfigitem('web', 'certificate',
1174 1177 default=None,
1175 1178 )
1176 1179 coreconfigitem('web', 'collapse',
1177 1180 default=False,
1178 1181 )
1179 1182 coreconfigitem('web', 'csp',
1180 1183 default=None,
1181 1184 )
1182 1185 coreconfigitem('web', 'deny_read',
1183 1186 default=list,
1184 1187 )
1185 1188 coreconfigitem('web', 'descend',
1186 1189 default=True,
1187 1190 )
1188 1191 coreconfigitem('web', 'description',
1189 1192 default="",
1190 1193 )
1191 1194 coreconfigitem('web', 'encoding',
1192 1195 default=lambda: encoding.encoding,
1193 1196 )
1194 1197 coreconfigitem('web', 'errorlog',
1195 1198 default='-',
1196 1199 )
1197 1200 coreconfigitem('web', 'ipv6',
1198 1201 default=False,
1199 1202 )
1200 1203 coreconfigitem('web', 'maxchanges',
1201 1204 default=10,
1202 1205 )
1203 1206 coreconfigitem('web', 'maxfiles',
1204 1207 default=10,
1205 1208 )
1206 1209 coreconfigitem('web', 'maxshortchanges',
1207 1210 default=60,
1208 1211 )
1209 1212 coreconfigitem('web', 'motd',
1210 1213 default='',
1211 1214 )
1212 1215 coreconfigitem('web', 'name',
1213 1216 default=dynamicdefault,
1214 1217 )
1215 1218 coreconfigitem('web', 'port',
1216 1219 default=8000,
1217 1220 )
1218 1221 coreconfigitem('web', 'prefix',
1219 1222 default='',
1220 1223 )
1221 1224 coreconfigitem('web', 'push_ssl',
1222 1225 default=True,
1223 1226 )
1224 1227 coreconfigitem('web', 'refreshinterval',
1225 1228 default=20,
1226 1229 )
1227 1230 coreconfigitem('web', 'staticurl',
1228 1231 default=None,
1229 1232 )
1230 1233 coreconfigitem('web', 'stripes',
1231 1234 default=1,
1232 1235 )
1233 1236 coreconfigitem('web', 'style',
1234 1237 default='paper',
1235 1238 )
1236 1239 coreconfigitem('web', 'templates',
1237 1240 default=None,
1238 1241 )
1239 1242 coreconfigitem('web', 'view',
1240 1243 default='served',
1241 1244 )
1242 1245 coreconfigitem('worker', 'backgroundclose',
1243 1246 default=dynamicdefault,
1244 1247 )
1245 1248 # Windows defaults to a limit of 512 open files. A buffer of 128
1246 1249 # should give us enough headway.
1247 1250 coreconfigitem('worker', 'backgroundclosemaxqueue',
1248 1251 default=384,
1249 1252 )
1250 1253 coreconfigitem('worker', 'backgroundcloseminfilecount',
1251 1254 default=2048,
1252 1255 )
1253 1256 coreconfigitem('worker', 'backgroundclosethreadcount',
1254 1257 default=4,
1255 1258 )
1256 1259 coreconfigitem('worker', 'numcpus',
1257 1260 default=None,
1258 1261 )
1259 1262
1260 1263 # Rebase-related configuration moved to core because other extensions are doing
1261 1264 # strange things. For example, shelve imports the extension to reuse some bits
1262 1265 # without formally loading it.
1263 1266 coreconfigitem('commands', 'rebase.requiredest',
1264 1267 default=False,
1265 1268 )
1266 1269 coreconfigitem('experimental', 'rebaseskipobsolete',
1267 1270 default=True,
1268 1271 )
1269 1272 coreconfigitem('rebase', 'singletransaction',
1270 1273 default=False,
1271 1274 )
@@ -1,491 +1,492 b''
1 1 # mdiff.py - diff and patch routines for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import re
11 11 import struct
12 12 import zlib
13 13
14 14 from .i18n import _
15 15 from . import (
16 16 error,
17 17 policy,
18 18 pycompat,
19 19 util,
20 20 )
21 21
22 22 bdiff = policy.importmod(r'bdiff')
23 23 mpatch = policy.importmod(r'mpatch')
24 24
25 25 blocks = bdiff.blocks
26 26 fixws = bdiff.fixws
27 27 patches = mpatch.patches
28 28 patchedsize = mpatch.patchedsize
29 29 textdiff = bdiff.bdiff
30 30
31 31 def splitnewlines(text):
32 32 '''like str.splitlines, but only split on newlines.'''
33 33 lines = [l + '\n' for l in text.split('\n')]
34 34 if lines:
35 35 if lines[-1] == '\n':
36 36 lines.pop()
37 37 else:
38 38 lines[-1] = lines[-1][:-1]
39 39 return lines
40 40
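An added illustration of how splitnewlines() differs from str.splitlines(): terminators are kept and only '\n' acts as a separator.

    assert splitnewlines('a\nb\n') == ['a\n', 'b\n']
    assert splitnewlines('a\nb') == ['a\n', 'b']
    # str.splitlines drops the terminators and also splits on '\r'
    assert 'a\r\nb'.splitlines() == ['a', 'b']
    assert splitnewlines('a\r\nb') == ['a\r\n', 'b']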
41 41 class diffopts(object):
42 42 '''context is the number of context lines
43 43 text treats all files as text
44 44 showfunc enables diff -p output
45 45 git enables the git extended patch format
46 46 nodates removes dates from diff headers
47 47 nobinary ignores binary files
48 48 noprefix disables the 'a/' and 'b/' prefixes (ignored in plain mode)
49 49 ignorews ignores all whitespace changes in the diff
50 50 ignorewsamount ignores changes in the amount of whitespace
51 51 ignoreblanklines ignores changes whose lines are all blank
52 52 upgrade generates git diffs to avoid data loss
53 53 '''
54 54
55 55 defaults = {
56 56 'context': 3,
57 57 'text': False,
58 58 'showfunc': False,
59 59 'git': False,
60 60 'nodates': False,
61 61 'nobinary': False,
62 62 'noprefix': False,
63 63 'index': 0,
64 64 'ignorews': False,
65 65 'ignorewsamount': False,
66 66 'ignorewseol': False,
67 67 'ignoreblanklines': False,
68 68 'upgrade': False,
69 69 'showsimilarity': False,
70 'worddiff': False,
70 71 }
71 72
72 73 def __init__(self, **opts):
73 74 opts = pycompat.byteskwargs(opts)
74 75 for k in self.defaults.keys():
75 76 v = opts.get(k)
76 77 if v is None:
77 78 v = self.defaults[k]
78 79 setattr(self, k, v)
79 80
80 81 try:
81 82 self.context = int(self.context)
82 83 except ValueError:
83 84 raise error.Abort(_('diff context lines count must be '
84 85 'an integer, not %r') % self.context)
85 86
86 87 def copy(self, **kwargs):
87 88 opts = dict((k, getattr(self, k)) for k in self.defaults)
88 89 opts = pycompat.strkwargs(opts)
89 90 opts.update(kwargs)
90 91 return diffopts(**opts)
91 92
92 93 defaultopts = diffopts()
93 94
94 95 def wsclean(opts, text, blank=True):
95 96 if opts.ignorews:
96 97 text = bdiff.fixws(text, 1)
97 98 elif opts.ignorewsamount:
98 99 text = bdiff.fixws(text, 0)
99 100 if blank and opts.ignoreblanklines:
100 101 text = re.sub('\n+', '\n', text).strip('\n')
101 102 if opts.ignorewseol:
102 103 text = re.sub(r'[ \t\r\f]+\n', r'\n', text)
103 104 return text
104 105
105 106 def splitblock(base1, lines1, base2, lines2, opts):
106 107 # The input lines match except for interwoven blank lines. We
107 108 # transform it into a sequence of matching blocks and blank blocks.
108 109 lines1 = [(wsclean(opts, l) and 1 or 0) for l in lines1]
109 110 lines2 = [(wsclean(opts, l) and 1 or 0) for l in lines2]
110 111 s1, e1 = 0, len(lines1)
111 112 s2, e2 = 0, len(lines2)
112 113 while s1 < e1 or s2 < e2:
113 114 i1, i2, btype = s1, s2, '='
114 115 if (i1 >= e1 or lines1[i1] == 0
115 116 or i2 >= e2 or lines2[i2] == 0):
116 117 # Consume the block of blank lines
117 118 btype = '~'
118 119 while i1 < e1 and lines1[i1] == 0:
119 120 i1 += 1
120 121 while i2 < e2 and lines2[i2] == 0:
121 122 i2 += 1
122 123 else:
123 124 # Consume the matching lines
124 125 while i1 < e1 and lines1[i1] == 1 and lines2[i2] == 1:
125 126 i1 += 1
126 127 i2 += 1
127 128 yield [base1 + s1, base1 + i1, base2 + s2, base2 + i2], btype
128 129 s1 = i1
129 130 s2 = i2
130 131
131 132 def hunkinrange(hunk, linerange):
132 133 """Return True if `hunk` defined as (start, length) is in `linerange`
133 134 defined as (lowerbound, upperbound).
134 135
135 136 >>> hunkinrange((5, 10), (2, 7))
136 137 True
137 138 >>> hunkinrange((5, 10), (6, 12))
138 139 True
139 140 >>> hunkinrange((5, 10), (13, 17))
140 141 True
141 142 >>> hunkinrange((5, 10), (3, 17))
142 143 True
143 144 >>> hunkinrange((5, 10), (1, 3))
144 145 False
145 146 >>> hunkinrange((5, 10), (18, 20))
146 147 False
147 148 >>> hunkinrange((5, 10), (1, 5))
148 149 False
149 150 >>> hunkinrange((5, 10), (15, 27))
150 151 False
151 152 """
152 153 start, length = hunk
153 154 lowerbound, upperbound = linerange
154 155 return lowerbound < start + length and start < upperbound
155 156
156 157 def blocksinrange(blocks, rangeb):
157 158 """filter `blocks` like (a1, a2, b1, b2) from items outside line range
158 159 `rangeb` from ``(b1, b2)`` point of view.
159 160
160 161 Return `filteredblocks, rangea` where:
161 162
162 163 * `filteredblocks` is list of ``block = (a1, a2, b1, b2), stype`` items of
163 164 `blocks` that are inside `rangeb` from ``(b1, b2)`` point of view; a
164 165 block ``(b1, b2)`` being inside `rangeb` if
165 166 ``rangeb[0] < b2 and b1 < rangeb[1]``;
166 167 * `rangea` is the line range w.r.t. the ``(a1, a2)`` parts of `blocks`.
167 168 """
168 169 lbb, ubb = rangeb
169 170 lba, uba = None, None
170 171 filteredblocks = []
171 172 for block in blocks:
172 173 (a1, a2, b1, b2), stype = block
173 174 if lbb >= b1 and ubb <= b2 and stype == '=':
174 175 # rangeb is within a single "=" hunk, restrict back linerange1
175 176 # by offsetting rangeb
176 177 lba = lbb - b1 + a1
177 178 uba = ubb - b1 + a1
178 179 else:
179 180 if b1 <= lbb < b2:
180 181 if stype == '=':
181 182 lba = a2 - (b2 - lbb)
182 183 else:
183 184 lba = a1
184 185 if b1 < ubb <= b2:
185 186 if stype == '=':
186 187 uba = a1 + (ubb - b1)
187 188 else:
188 189 uba = a2
189 190 if hunkinrange((b1, (b2 - b1)), rangeb):
190 191 filteredblocks.append(block)
191 192 if lba is None or uba is None or uba < lba:
192 193 raise error.Abort(_('line range exceeds file size'))
193 194 return filteredblocks, (lba, uba)
194 195
195 196 def allblocks(text1, text2, opts=None, lines1=None, lines2=None):
196 197 """Return (block, type) tuples, where block is an mdiff.blocks
197 198 line entry. type is '=' for blocks matching exactly one another
198 199 (bdiff blocks), '!' for non-matching blocks and '~' for blocks
199 200 matching only after having filtered blank lines.
200 201 lines1 and lines2 are text1 and text2 split with splitnewlines(), if
201 202 they are already available.
202 203 """
203 204 if opts is None:
204 205 opts = defaultopts
205 206 if opts.ignorews or opts.ignorewsamount or opts.ignorewseol:
206 207 text1 = wsclean(opts, text1, False)
207 208 text2 = wsclean(opts, text2, False)
208 209 diff = bdiff.blocks(text1, text2)
209 210 for i, s1 in enumerate(diff):
210 211 # The first match is special.
211 212 # We've either found a match starting at line 0 or a match later
212 213 # in the file. If it starts later, old and new below will both be
213 214 # empty and we'll continue to the next match.
214 215 if i > 0:
215 216 s = diff[i - 1]
216 217 else:
217 218 s = [0, 0, 0, 0]
218 219 s = [s[1], s1[0], s[3], s1[2]]
219 220
220 221 # bdiff sometimes gives huge matches past eof, this check eats them,
221 222 # and deals with the special first match case described above
222 223 if s[0] != s[1] or s[2] != s[3]:
223 224 type = '!'
224 225 if opts.ignoreblanklines:
225 226 if lines1 is None:
226 227 lines1 = splitnewlines(text1)
227 228 if lines2 is None:
228 229 lines2 = splitnewlines(text2)
229 230 old = wsclean(opts, "".join(lines1[s[0]:s[1]]))
230 231 new = wsclean(opts, "".join(lines2[s[2]:s[3]]))
231 232 if old == new:
232 233 type = '~'
233 234 yield s, type
234 235 yield s1, '='
235 236
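A hedged sketch of consuming allblocks(): keep only the '!' (non-matching) ranges, ignoring '=' and '~' blocks.

    def changedranges(text1, text2, opts=None):
        for (a1, a2, b1, b2), btype in allblocks(text1, text2, opts):
            if btype == '!':
                yield (a1, a2), (b1, b2)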
236 237 def unidiff(a, ad, b, bd, fn1, fn2, opts=defaultopts):
237 238 """Return a unified diff as a (headers, hunks) tuple.
238 239
239 240 If the diff is not null, `headers` is a list with unified diff header
240 241 lines "--- <original>" and "+++ <new>" and `hunks` is a generator yielding
241 242 (hunkrange, hunklines) coming from _unidiff().
242 243 Otherwise, `headers` and `hunks` are empty.
243 244 """
244 245 def datetag(date, fn=None):
245 246 if not opts.git and not opts.nodates:
246 247 return '\t%s' % date
247 248 if fn and ' ' in fn:
248 249 return '\t'
249 250 return ''
250 251
251 252 sentinel = [], ()
252 253 if not a and not b:
253 254 return sentinel
254 255
255 256 if opts.noprefix:
256 257 aprefix = bprefix = ''
257 258 else:
258 259 aprefix = 'a/'
259 260 bprefix = 'b/'
260 261
261 262 epoch = util.datestr((0, 0))
262 263
263 264 fn1 = util.pconvert(fn1)
264 265 fn2 = util.pconvert(fn2)
265 266
266 267 def checknonewline(lines):
267 268 for text in lines:
268 269 if text[-1:] != '\n':
269 270 text += "\n\ No newline at end of file\n"
270 271 yield text
271 272
272 273 if not opts.text and (util.binary(a) or util.binary(b)):
273 274 if a and b and len(a) == len(b) and a == b:
274 275 return sentinel
275 276 headerlines = []
276 277 hunks = (None, ['Binary file %s has changed\n' % fn1]),
277 278 elif not a:
278 279 b = splitnewlines(b)
279 280 if a is None:
280 281 l1 = '--- /dev/null%s' % datetag(epoch)
281 282 else:
282 283 l1 = "--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1))
283 284 l2 = "+++ %s%s" % (bprefix + fn2, datetag(bd, fn2))
284 285 headerlines = [l1, l2]
285 286 size = len(b)
286 287 hunkrange = (0, 0, 1, size)
287 288 hunklines = ["@@ -0,0 +1,%d @@\n" % size] + ["+" + e for e in b]
288 289 hunks = (hunkrange, checknonewline(hunklines)),
289 290 elif not b:
290 291 a = splitnewlines(a)
291 292 l1 = "--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1))
292 293 if b is None:
293 294 l2 = '+++ /dev/null%s' % datetag(epoch)
294 295 else:
295 296 l2 = "+++ %s%s%s" % (bprefix, fn2, datetag(bd, fn2))
296 297 headerlines = [l1, l2]
297 298 size = len(a)
298 299 hunkrange = (1, size, 0, 0)
299 300 hunklines = ["@@ -1,%d +0,0 @@\n" % size] + ["-" + e for e in a]
300 301 hunks = (hunkrange, checknonewline(hunklines)),
301 302 else:
302 303 diffhunks = _unidiff(a, b, opts=opts)
303 304 try:
304 305 hunkrange, hunklines = next(diffhunks)
305 306 except StopIteration:
306 307 return sentinel
307 308
308 309 headerlines = [
309 310 "--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1)),
310 311 "+++ %s%s%s" % (bprefix, fn2, datetag(bd, fn2)),
311 312 ]
312 313 def rewindhunks():
313 314 yield hunkrange, checknonewline(hunklines)
314 315 for hr, hl in diffhunks:
315 316 yield hr, checknonewline(hl)
316 317
317 318 hunks = rewindhunks()
318 319
319 320 return headerlines, hunks
320 321
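A sketch of driving unidiff() directly (file names and dates are made up); each hunklines list already includes its '@@' header line.

    headers, hunks = unidiff('a\nb\n', '', 'a\nc\n', '', 'f.txt', 'f.txt')
    out = list(headers)
    for hunkrange, hunklines in hunks:
        # hunkrange is (s1, l1, s2, l2) matching the '@@ -s1,l1 +s2,l2 @@' header
        out.extend(hunklines)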
321 322 def _unidiff(t1, t2, opts=defaultopts):
322 323 """Yield hunks of a headerless unified diff from t1 and t2 texts.
323 324
324 325 Each hunk consists of a (hunkrange, hunklines) tuple where `hunkrange` is a
325 326 tuple (s1, l1, s2, l2) representing the range information of the hunk to
326 327 form the '@@ -s1,l1 +s2,l2 @@' header and `hunklines` is a list of lines
327 328 of the hunk combining said header followed by line additions and
328 329 deletions.
329 330 """
330 331 l1 = splitnewlines(t1)
331 332 l2 = splitnewlines(t2)
332 333 def contextend(l, len):
333 334 ret = l + opts.context
334 335 if ret > len:
335 336 ret = len
336 337 return ret
337 338
338 339 def contextstart(l):
339 340 ret = l - opts.context
340 341 if ret < 0:
341 342 return 0
342 343 return ret
343 344
344 345 lastfunc = [0, '']
345 346 def yieldhunk(hunk):
346 347 (astart, a2, bstart, b2, delta) = hunk
347 348 aend = contextend(a2, len(l1))
348 349 alen = aend - astart
349 350 blen = b2 - bstart + aend - a2
350 351
351 352 func = ""
352 353 if opts.showfunc:
353 354 lastpos, func = lastfunc
354 355 # walk backwards from the start of the context up to the start of
355 356 # the previous hunk context until we find a line starting with an
356 357 # alphanumeric char.
357 358 for i in xrange(astart - 1, lastpos - 1, -1):
358 359 if l1[i][0].isalnum():
359 360 func = ' ' + l1[i].rstrip()[:40]
360 361 lastfunc[1] = func
361 362 break
362 363 # by recording this hunk's starting point as the next place to
363 364 # start looking for function lines, we avoid reading any line in
364 365 # the file more than once.
365 366 lastfunc[0] = astart
366 367
367 368 # zero-length hunk ranges report their start line as one less
368 369 if alen:
369 370 astart += 1
370 371 if blen:
371 372 bstart += 1
372 373
373 374 hunkrange = astart, alen, bstart, blen
374 375 hunklines = (
375 376 ["@@ -%d,%d +%d,%d @@%s\n" % (hunkrange + (func,))]
376 377 + delta
377 378 + [' ' + l1[x] for x in xrange(a2, aend)]
378 379 )
379 380 yield hunkrange, hunklines
380 381
381 382 # bdiff.blocks gives us the matching sequences in the files. The loop
382 383 # below finds the spaces between those matching sequences and translates
383 384 # them into diff output.
384 385 #
385 386 hunk = None
386 387 ignoredlines = 0
387 388 for s, stype in allblocks(t1, t2, opts, l1, l2):
388 389 a1, a2, b1, b2 = s
389 390 if stype != '!':
390 391 if stype == '~':
391 392 # The diff context lines are based on t1 content. When
392 393 # blank lines are ignored, the new lines offsets must
393 394 # be adjusted as if equivalent blocks ('~') had the
394 395 # same sizes on both sides.
395 396 ignoredlines += (b2 - b1) - (a2 - a1)
396 397 continue
397 398 delta = []
398 399 old = l1[a1:a2]
399 400 new = l2[b1:b2]
400 401
401 402 b1 -= ignoredlines
402 403 b2 -= ignoredlines
403 404 astart = contextstart(a1)
404 405 bstart = contextstart(b1)
405 406 prev = None
406 407 if hunk:
407 408 # join with the previous hunk if it falls inside the context
408 409 if astart < hunk[1] + opts.context + 1:
409 410 prev = hunk
410 411 astart = hunk[1]
411 412 bstart = hunk[3]
412 413 else:
413 414 for x in yieldhunk(hunk):
414 415 yield x
415 416 if prev:
416 417 # we've joined the previous hunk, record the new ending points.
417 418 hunk[1] = a2
418 419 hunk[3] = b2
419 420 delta = hunk[4]
420 421 else:
421 422 # create a new hunk
422 423 hunk = [astart, a2, bstart, b2, delta]
423 424
424 425 delta[len(delta):] = [' ' + x for x in l1[astart:a1]]
425 426 delta[len(delta):] = ['-' + x for x in old]
426 427 delta[len(delta):] = ['+' + x for x in new]
427 428
428 429 if hunk:
429 430 for x in yieldhunk(hunk):
430 431 yield x
431 432
432 433 def b85diff(to, tn):
433 434 '''print base85-encoded binary diff'''
434 435 def fmtline(line):
435 436 l = len(line)
436 437 if l <= 26:
437 438 l = chr(ord('A') + l - 1)
438 439 else:
439 440 l = chr(l - 26 + ord('a') - 1)
440 441 return '%c%s\n' % (l, util.b85encode(line, True))
441 442
442 443 def chunk(text, csize=52):
443 444 l = len(text)
444 445 i = 0
445 446 while i < l:
446 447 yield text[i:i + csize]
447 448 i += csize
448 449
449 450 if to is None:
450 451 to = ''
451 452 if tn is None:
452 453 tn = ''
453 454
454 455 if to == tn:
455 456 return ''
456 457
457 458 # TODO: deltas
458 459 ret = []
459 460 ret.append('GIT binary patch\n')
460 461 ret.append('literal %d\n' % len(tn))
461 462 for l in chunk(zlib.compress(tn)):
462 463 ret.append(fmtline(l))
463 464 ret.append('\n')
464 465
465 466 return ''.join(ret)
466 467
467 468 def patchtext(bin):
468 469 pos = 0
469 470 t = []
470 471 while pos < len(bin):
471 472 p1, p2, l = struct.unpack(">lll", bin[pos:pos + 12])
472 473 pos += 12
473 474 t.append(bin[pos:pos + l])
474 475 pos += l
475 476 return "".join(t)
476 477
477 478 def patch(a, bin):
478 479 if len(a) == 0:
479 480 # skip over trivial delta header
480 481 return util.buffer(bin, 12)
481 482 return mpatch.patches(a, [bin])
482 483
483 484 # similar to difflib.SequenceMatcher.get_matching_blocks
484 485 def get_matching_blocks(a, b):
485 486 return [(d[0], d[2], d[1] - d[0]) for d in bdiff.blocks(a, b)]
486 487
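For comparison, difflib exposes the same idea; a hedged side-by-side (difflib ends its list with a zero-length (len(a), len(b), 0) sentinel, which this bdiff-based helper does not necessarily produce):

    import difflib

    a, b = 'one\ntwo\nthree\n', 'one\ntwo\nfour\n'
    dlblocks = difflib.SequenceMatcher(None, a, b).get_matching_blocks()
    hgblocks = get_matching_blocks(a, b)   # plain (astart, bstart, length) tuples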
487 488 def trivialdiffheader(length):
488 489 return struct.pack(">lll", 0, 0, length) if length else ''
489 490
490 491 def replacediffheader(oldlen, newlen):
491 492 return struct.pack(">lll", 0, oldlen, newlen)
@@ -1,2816 +1,2895 b''
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from __future__ import absolute_import, print_function
10 10
11 11 import collections
12 12 import copy
13 import difflib
13 14 import email
14 15 import errno
15 16 import hashlib
16 17 import os
17 18 import posixpath
18 19 import re
19 20 import shutil
20 21 import tempfile
21 22 import zlib
22 23
23 24 from .i18n import _
24 25 from .node import (
25 26 hex,
26 27 short,
27 28 )
28 29 from . import (
29 30 copies,
30 31 encoding,
31 32 error,
32 33 mail,
33 34 mdiff,
34 35 pathutil,
35 36 policy,
36 37 pycompat,
37 38 scmutil,
38 39 similar,
39 40 util,
40 41 vfs as vfsmod,
41 42 )
42 43
43 44 diffhelpers = policy.importmod(r'diffhelpers')
44 45 stringio = util.stringio
45 46
46 47 gitre = re.compile(br'diff --git a/(.*) b/(.*)')
47 48 tabsplitter = re.compile(br'(\t+|[^\t]+)')
48 49
49 50 PatchError = error.PatchError
50 51
51 52 # public functions
52 53
53 54 def split(stream):
54 55 '''return an iterator of individual patches from a stream'''
55 56 def isheader(line, inheader):
56 57 if inheader and line[0] in (' ', '\t'):
57 58 # continuation
58 59 return True
59 60 if line[0] in (' ', '-', '+'):
60 61 # diff line - don't check for header pattern in there
61 62 return False
62 63 l = line.split(': ', 1)
63 64 return len(l) == 2 and ' ' not in l[0]
64 65
65 66 def chunk(lines):
66 67 return stringio(''.join(lines))
67 68
68 69 def hgsplit(stream, cur):
69 70 inheader = True
70 71
71 72 for line in stream:
72 73 if not line.strip():
73 74 inheader = False
74 75 if not inheader and line.startswith('# HG changeset patch'):
75 76 yield chunk(cur)
76 77 cur = []
77 78 inheader = True
78 79
79 80 cur.append(line)
80 81
81 82 if cur:
82 83 yield chunk(cur)
83 84
84 85 def mboxsplit(stream, cur):
85 86 for line in stream:
86 87 if line.startswith('From '):
87 88 for c in split(chunk(cur[1:])):
88 89 yield c
89 90 cur = []
90 91
91 92 cur.append(line)
92 93
93 94 if cur:
94 95 for c in split(chunk(cur[1:])):
95 96 yield c
96 97
97 98 def mimesplit(stream, cur):
98 99 def msgfp(m):
99 100 fp = stringio()
100 101 g = email.Generator.Generator(fp, mangle_from_=False)
101 102 g.flatten(m)
102 103 fp.seek(0)
103 104 return fp
104 105
105 106 for line in stream:
106 107 cur.append(line)
107 108 c = chunk(cur)
108 109
109 110 m = email.Parser.Parser().parse(c)
110 111 if not m.is_multipart():
111 112 yield msgfp(m)
112 113 else:
113 114 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
114 115 for part in m.walk():
115 116 ct = part.get_content_type()
116 117 if ct not in ok_types:
117 118 continue
118 119 yield msgfp(part)
119 120
120 121 def headersplit(stream, cur):
121 122 inheader = False
122 123
123 124 for line in stream:
124 125 if not inheader and isheader(line, inheader):
125 126 yield chunk(cur)
126 127 cur = []
127 128 inheader = True
128 129 if inheader and not isheader(line, inheader):
129 130 inheader = False
130 131
131 132 cur.append(line)
132 133
133 134 if cur:
134 135 yield chunk(cur)
135 136
136 137 def remainder(cur):
137 138 yield chunk(cur)
138 139
139 140 class fiter(object):
140 141 def __init__(self, fp):
141 142 self.fp = fp
142 143
143 144 def __iter__(self):
144 145 return self
145 146
146 147 def next(self):
147 148 l = self.fp.readline()
148 149 if not l:
149 150 raise StopIteration
150 151 return l
151 152
152 153 __next__ = next
153 154
154 155 inheader = False
155 156 cur = []
156 157
157 158 mimeheaders = ['content-type']
158 159
159 160 if not util.safehasattr(stream, 'next'):
160 161 # http responses, for example, have readline but not next
161 162 stream = fiter(stream)
162 163
163 164 for line in stream:
164 165 cur.append(line)
165 166 if line.startswith('# HG changeset patch'):
166 167 return hgsplit(stream, cur)
167 168 elif line.startswith('From '):
168 169 return mboxsplit(stream, cur)
169 170 elif isheader(line, inheader):
170 171 inheader = True
171 172 if line.split(':', 1)[0].lower() in mimeheaders:
172 173 # let email parser handle this
173 174 return mimesplit(stream, cur)
174 175 elif line.startswith('--- ') and inheader:
175 176 # No evil headers seen before the diff start; split by hand
176 177 return headersplit(stream, cur)
177 178 # Not enough info, keep reading
178 179
179 180 # if we are here, we have a very plain patch
180 181 return remainder(cur)
181 182
182 183 ## Some facility for extensible patch parsing:
183 184 # list of pairs ("header to match", "data key")
184 185 patchheadermap = [('Date', 'date'),
185 186 ('Branch', 'branch'),
186 187 ('Node ID', 'nodeid'),
187 188 ]
188 189
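Each pair maps an '# <Header> ' line of an exported patch to a key of the dictionary returned by extract() below. A small sketch of that mapping (the sample line is invented):

    line = '# Date 1509720000 0'
    for header, key in patchheadermap:
        prefix = '# %s ' % header
        if line.startswith(prefix):
            value = line[len(prefix):]      # key == 'date', value == '1509720000 0'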
189 190 def extract(ui, fileobj):
190 191 '''extract patch from data read from fileobj.
191 192
192 193 patch can be a normal patch or contained in an email message.
193 194
194 195 return a dictionary. Standard keys are:
195 196 - filename,
196 197 - message,
197 198 - user,
198 199 - date,
199 200 - branch,
200 201 - node,
201 202 - p1,
202 203 - p2.
203 204 Any item can be missing from the dictionary. If filename is missing,
204 205 fileobj did not contain a patch. Caller must unlink filename when done.'''
205 206
206 207 # attempt to detect the start of a patch
207 208 # (this heuristic is borrowed from quilt)
208 209 diffre = re.compile(br'^(?:Index:[ \t]|diff[ \t]-|RCS file: |'
209 210 br'retrieving revision [0-9]+(\.[0-9]+)*$|'
210 211 br'---[ \t].*?^\+\+\+[ \t]|'
211 212 br'\*\*\*[ \t].*?^---[ \t])',
212 213 re.MULTILINE | re.DOTALL)
213 214
214 215 data = {}
215 216 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
216 217 tmpfp = os.fdopen(fd, pycompat.sysstr('w'))
217 218 try:
218 219 msg = email.Parser.Parser().parse(fileobj)
219 220
220 221 subject = msg['Subject'] and mail.headdecode(msg['Subject'])
221 222 data['user'] = msg['From'] and mail.headdecode(msg['From'])
222 223 if not subject and not data['user']:
223 224 # Not an email, restore parsed headers if any
224 225 subject = '\n'.join(': '.join(h) for h in msg.items()) + '\n'
225 226
226 227 # should try to parse msg['Date']
227 228 parents = []
228 229
229 230 if subject:
230 231 if subject.startswith('[PATCH'):
231 232 pend = subject.find(']')
232 233 if pend >= 0:
233 234 subject = subject[pend + 1:].lstrip()
234 235 subject = re.sub(br'\n[ \t]+', ' ', subject)
235 236 ui.debug('Subject: %s\n' % subject)
236 237 if data['user']:
237 238 ui.debug('From: %s\n' % data['user'])
238 239 diffs_seen = 0
239 240 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
240 241 message = ''
241 242 for part in msg.walk():
242 243 content_type = part.get_content_type()
243 244 ui.debug('Content-Type: %s\n' % content_type)
244 245 if content_type not in ok_types:
245 246 continue
246 247 payload = part.get_payload(decode=True)
247 248 m = diffre.search(payload)
248 249 if m:
249 250 hgpatch = False
250 251 hgpatchheader = False
251 252 ignoretext = False
252 253
253 254 ui.debug('found patch at byte %d\n' % m.start(0))
254 255 diffs_seen += 1
255 256 cfp = stringio()
256 257 for line in payload[:m.start(0)].splitlines():
257 258 if line.startswith('# HG changeset patch') and not hgpatch:
258 259 ui.debug('patch generated by hg export\n')
259 260 hgpatch = True
260 261 hgpatchheader = True
261 262 # drop earlier commit message content
262 263 cfp.seek(0)
263 264 cfp.truncate()
264 265 subject = None
265 266 elif hgpatchheader:
266 267 if line.startswith('# User '):
267 268 data['user'] = line[7:]
268 269 ui.debug('From: %s\n' % data['user'])
269 270 elif line.startswith("# Parent "):
270 271 parents.append(line[9:].lstrip())
271 272 elif line.startswith("# "):
272 273 for header, key in patchheadermap:
273 274 prefix = '# %s ' % header
274 275 if line.startswith(prefix):
275 276 data[key] = line[len(prefix):]
276 277 else:
277 278 hgpatchheader = False
278 279 elif line == '---':
279 280 ignoretext = True
280 281 if not hgpatchheader and not ignoretext:
281 282 cfp.write(line)
282 283 cfp.write('\n')
283 284 message = cfp.getvalue()
284 285 if tmpfp:
285 286 tmpfp.write(payload)
286 287 if not payload.endswith('\n'):
287 288 tmpfp.write('\n')
288 289 elif not diffs_seen and message and content_type == 'text/plain':
289 290 message += '\n' + payload
290 291 except: # re-raises
291 292 tmpfp.close()
292 293 os.unlink(tmpname)
293 294 raise
294 295
295 296 if subject and not message.startswith(subject):
296 297 message = '%s\n%s' % (subject, message)
297 298 data['message'] = message
298 299 tmpfp.close()
299 300 if parents:
300 301 data['p1'] = parents.pop(0)
301 302 if parents:
302 303 data['p2'] = parents.pop(0)
303 304
304 305 if diffs_seen:
305 306 data['filename'] = tmpname
306 307 else:
307 308 os.unlink(tmpname)
308 309 return data
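# Illustrative sketch (annotation, not part of the module): a caller might
# use extract() roughly like this, where 'ui' is assumed to be a ui object
# and 'fp' an open file object containing a patch or a patch email:
#
#     data = extract(ui, fp)
#     try:
#         if 'filename' in data:
#             ...apply the patch stored in data['filename']...
#     finally:
#         if 'filename' in data:
#             os.unlink(data['filename'])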
309 310
310 311 class patchmeta(object):
311 312 """Patched file metadata
312 313
313 314 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
314 315 or COPY. 'path' is patched file path. 'oldpath' is set to the
315 316 origin file when 'op' is either COPY or RENAME, None otherwise. If
316 317 file mode is changed, 'mode' is a tuple (islink, isexec) where
317 318 'islink' is True if the file is a symlink and 'isexec' is True if
318 319 the file is executable. Otherwise, 'mode' is None.
319 320 """
320 321 def __init__(self, path):
321 322 self.path = path
322 323 self.oldpath = None
323 324 self.mode = None
324 325 self.op = 'MODIFY'
325 326 self.binary = False
326 327
327 328 def setmode(self, mode):
328 329 islink = mode & 0o20000
329 330 isexec = mode & 0o100
330 331 self.mode = (islink, isexec)
331 332
332 333 def copy(self):
333 334 other = patchmeta(self.path)
334 335 other.oldpath = self.oldpath
335 336 other.mode = self.mode
336 337 other.op = self.op
337 338 other.binary = self.binary
338 339 return other
339 340
340 341 def _ispatchinga(self, afile):
341 342 if afile == '/dev/null':
342 343 return self.op == 'ADD'
343 344 return afile == 'a/' + (self.oldpath or self.path)
344 345
345 346 def _ispatchingb(self, bfile):
346 347 if bfile == '/dev/null':
347 348 return self.op == 'DELETE'
348 349 return bfile == 'b/' + self.path
349 350
350 351 def ispatching(self, afile, bfile):
351 352 return self._ispatchinga(afile) and self._ispatchingb(bfile)
352 353
353 354 def __repr__(self):
354 355 return "<patchmeta %s %r>" % (self.op, self.path)
355 356
356 357 def readgitpatch(lr):
357 358 """extract git-style metadata about patches from <patchname>"""
358 359
359 360 # Filter patch for git information
360 361 gp = None
361 362 gitpatches = []
362 363 for line in lr:
363 364 line = line.rstrip(' \r\n')
364 365 if line.startswith('diff --git a/'):
365 366 m = gitre.match(line)
366 367 if m:
367 368 if gp:
368 369 gitpatches.append(gp)
369 370 dst = m.group(2)
370 371 gp = patchmeta(dst)
371 372 elif gp:
372 373 if line.startswith('--- '):
373 374 gitpatches.append(gp)
374 375 gp = None
375 376 continue
376 377 if line.startswith('rename from '):
377 378 gp.op = 'RENAME'
378 379 gp.oldpath = line[12:]
379 380 elif line.startswith('rename to '):
380 381 gp.path = line[10:]
381 382 elif line.startswith('copy from '):
382 383 gp.op = 'COPY'
383 384 gp.oldpath = line[10:]
384 385 elif line.startswith('copy to '):
385 386 gp.path = line[8:]
386 387 elif line.startswith('deleted file'):
387 388 gp.op = 'DELETE'
388 389 elif line.startswith('new file mode '):
389 390 gp.op = 'ADD'
390 391 gp.setmode(int(line[-6:], 8))
391 392 elif line.startswith('new mode '):
392 393 gp.setmode(int(line[-6:], 8))
393 394 elif line.startswith('GIT binary patch'):
394 395 gp.binary = True
395 396 if gp:
396 397 gitpatches.append(gp)
397 398
398 399 return gitpatches
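# Illustrative example (annotation, not part of the module): given a git
# diff header such as
#
#     diff --git a/old.c b/new.c
#     rename from old.c
#     rename to new.c
#
# readgitpatch() yields a single patchmeta with op='RENAME',
# oldpath='old.c' and path='new.c'; 'mode' stays None because no
# 'new mode' / 'new file mode' line is present.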
399 400
400 401 class linereader(object):
401 402 # simple class to allow pushing lines back into the input stream
402 403 def __init__(self, fp):
403 404 self.fp = fp
404 405 self.buf = []
405 406
406 407 def push(self, line):
407 408 if line is not None:
408 409 self.buf.append(line)
409 410
410 411 def readline(self):
411 412 if self.buf:
412 413 l = self.buf[0]
413 414 del self.buf[0]
414 415 return l
415 416 return self.fp.readline()
416 417
417 418 def __iter__(self):
418 419 return iter(self.readline, '')
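# Illustrative example (annotation): push() makes one-line lookahead easy:
#
#     lr = linereader(fp)
#     l = lr.readline()
#     if not l.startswith('+++'):
#         lr.push(l)          # not ours, give it back
#
# The pushed line is returned by the next readline() call before any
# further data is read from fp.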
419 420
420 421 class abstractbackend(object):
421 422 def __init__(self, ui):
422 423 self.ui = ui
423 424
424 425 def getfile(self, fname):
425 426 """Return target file data and flags as a (data, (islink,
426 427 isexec)) tuple. Data is None if file is missing/deleted.
427 428 """
428 429 raise NotImplementedError
429 430
430 431 def setfile(self, fname, data, mode, copysource):
431 432 """Write data to target file fname and set its mode. mode is a
432 433 (islink, isexec) tuple. If data is None, the file content should
433 434 be left unchanged. If the file is modified after being copied,
434 435 copysource is set to the original file name.
435 436 """
436 437 raise NotImplementedError
437 438
438 439 def unlink(self, fname):
439 440 """Unlink target file."""
440 441 raise NotImplementedError
441 442
442 443 def writerej(self, fname, failed, total, lines):
443 444 """Write rejected lines for fname. total is the number of hunks
444 445 which failed to apply and total the total number of hunks for this
445 446 files.
446 447 """
447 448
448 449 def exists(self, fname):
449 450 raise NotImplementedError
450 451
451 452 def close(self):
452 453 raise NotImplementedError
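# Minimal sketch of the backend interface (annotation, not part of the
# module; 'memorybackend' is a hypothetical name).  fsbackend,
# workingbackend and repobackend below are the real implementations:
#
#     class memorybackend(abstractbackend):
#         def __init__(self, ui):
#             super(memorybackend, self).__init__(ui)
#             self.files = {}                  # fname -> (data, mode)
#         def getfile(self, fname):
#             return self.files.get(fname, (None, None))
#         def setfile(self, fname, data, mode, copysource):
#             self.files[fname] = (data, mode) # copysource ignored here
#         def unlink(self, fname):
#             self.files.pop(fname, None)
#         def exists(self, fname):
#             return fname in self.files
#         def close(self):
#             pass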
453 454
454 455 class fsbackend(abstractbackend):
455 456 def __init__(self, ui, basedir):
456 457 super(fsbackend, self).__init__(ui)
457 458 self.opener = vfsmod.vfs(basedir)
458 459
459 460 def getfile(self, fname):
460 461 if self.opener.islink(fname):
461 462 return (self.opener.readlink(fname), (True, False))
462 463
463 464 isexec = False
464 465 try:
465 466 isexec = self.opener.lstat(fname).st_mode & 0o100 != 0
466 467 except OSError as e:
467 468 if e.errno != errno.ENOENT:
468 469 raise
469 470 try:
470 471 return (self.opener.read(fname), (False, isexec))
471 472 except IOError as e:
472 473 if e.errno != errno.ENOENT:
473 474 raise
474 475 return None, None
475 476
476 477 def setfile(self, fname, data, mode, copysource):
477 478 islink, isexec = mode
478 479 if data is None:
479 480 self.opener.setflags(fname, islink, isexec)
480 481 return
481 482 if islink:
482 483 self.opener.symlink(data, fname)
483 484 else:
484 485 self.opener.write(fname, data)
485 486 if isexec:
486 487 self.opener.setflags(fname, False, True)
487 488
488 489 def unlink(self, fname):
489 490 self.opener.unlinkpath(fname, ignoremissing=True)
490 491
491 492 def writerej(self, fname, failed, total, lines):
492 493 fname = fname + ".rej"
493 494 self.ui.warn(
494 495 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
495 496 (failed, total, fname))
496 497 fp = self.opener(fname, 'w')
497 498 fp.writelines(lines)
498 499 fp.close()
499 500
500 501 def exists(self, fname):
501 502 return self.opener.lexists(fname)
502 503
503 504 class workingbackend(fsbackend):
504 505 def __init__(self, ui, repo, similarity):
505 506 super(workingbackend, self).__init__(ui, repo.root)
506 507 self.repo = repo
507 508 self.similarity = similarity
508 509 self.removed = set()
509 510 self.changed = set()
510 511 self.copied = []
511 512
512 513 def _checkknown(self, fname):
513 514 if self.repo.dirstate[fname] == '?' and self.exists(fname):
514 515 raise PatchError(_('cannot patch %s: file is not tracked') % fname)
515 516
516 517 def setfile(self, fname, data, mode, copysource):
517 518 self._checkknown(fname)
518 519 super(workingbackend, self).setfile(fname, data, mode, copysource)
519 520 if copysource is not None:
520 521 self.copied.append((copysource, fname))
521 522 self.changed.add(fname)
522 523
523 524 def unlink(self, fname):
524 525 self._checkknown(fname)
525 526 super(workingbackend, self).unlink(fname)
526 527 self.removed.add(fname)
527 528 self.changed.add(fname)
528 529
529 530 def close(self):
530 531 wctx = self.repo[None]
531 532 changed = set(self.changed)
532 533 for src, dst in self.copied:
533 534 scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
534 535 if self.removed:
535 536 wctx.forget(sorted(self.removed))
536 537 for f in self.removed:
537 538 if f not in self.repo.dirstate:
538 539 # File was deleted and no longer belongs to the
539 540 # dirstate, it was probably marked added then
540 541 # deleted, and should not be considered by
541 542 # marktouched().
542 543 changed.discard(f)
543 544 if changed:
544 545 scmutil.marktouched(self.repo, changed, self.similarity)
545 546 return sorted(self.changed)
546 547
547 548 class filestore(object):
548 549 def __init__(self, maxsize=None):
549 550 self.opener = None
550 551 self.files = {}
551 552 self.created = 0
552 553 self.maxsize = maxsize
553 554 if self.maxsize is None:
554 555 self.maxsize = 4*(2**20)
555 556 self.size = 0
556 557 self.data = {}
557 558
558 559 def setfile(self, fname, data, mode, copied=None):
559 560 if self.maxsize < 0 or (len(data) + self.size) <= self.maxsize:
560 561 self.data[fname] = (data, mode, copied)
561 562 self.size += len(data)
562 563 else:
563 564 if self.opener is None:
564 565 root = tempfile.mkdtemp(prefix='hg-patch-')
565 566 self.opener = vfsmod.vfs(root)
566 567 # Avoid filename issues with these simple names
567 568 fn = str(self.created)
568 569 self.opener.write(fn, data)
569 570 self.created += 1
570 571 self.files[fname] = (fn, mode, copied)
571 572
572 573 def getfile(self, fname):
573 574 if fname in self.data:
574 575 return self.data[fname]
575 576 if not self.opener or fname not in self.files:
576 577 return None, None, None
577 578 fn, mode, copied = self.files[fname]
578 579 return self.opener.read(fn), mode, copied
579 580
580 581 def close(self):
581 582 if self.opener:
582 583 shutil.rmtree(self.opener.base)
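# Illustrative usage (annotation): filestore keeps small files in memory
# and spills anything beyond maxsize (4 MiB by default) to a temporary
# directory, so it must be closed when done:
#
#     store = filestore()
#     store.setfile('foo.txt', 'content\n', (False, False))
#     data, mode, copied = store.getfile('foo.txt')
#     store.close()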
583 584
584 585 class repobackend(abstractbackend):
585 586 def __init__(self, ui, repo, ctx, store):
586 587 super(repobackend, self).__init__(ui)
587 588 self.repo = repo
588 589 self.ctx = ctx
589 590 self.store = store
590 591 self.changed = set()
591 592 self.removed = set()
592 593 self.copied = {}
593 594
594 595 def _checkknown(self, fname):
595 596 if fname not in self.ctx:
596 597 raise PatchError(_('cannot patch %s: file is not tracked') % fname)
597 598
598 599 def getfile(self, fname):
599 600 try:
600 601 fctx = self.ctx[fname]
601 602 except error.LookupError:
602 603 return None, None
603 604 flags = fctx.flags()
604 605 return fctx.data(), ('l' in flags, 'x' in flags)
605 606
606 607 def setfile(self, fname, data, mode, copysource):
607 608 if copysource:
608 609 self._checkknown(copysource)
609 610 if data is None:
610 611 data = self.ctx[fname].data()
611 612 self.store.setfile(fname, data, mode, copysource)
612 613 self.changed.add(fname)
613 614 if copysource:
614 615 self.copied[fname] = copysource
615 616
616 617 def unlink(self, fname):
617 618 self._checkknown(fname)
618 619 self.removed.add(fname)
619 620
620 621 def exists(self, fname):
621 622 return fname in self.ctx
622 623
623 624 def close(self):
624 625 return self.changed | self.removed
625 626
626 627 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
627 628 unidesc = re.compile('@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
628 629 contextdesc = re.compile('(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
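# Illustrative example (annotation): for a standard unified hunk header,
#
#     unidesc.match('@@ -1,7 +1,7 @@').groups() == ('1', '7', '1', '7')
#
# and for the one-line form '@@ -1 +1 @@' the two length groups are None.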
629 630 eolmodes = ['strict', 'crlf', 'lf', 'auto']
630 631
631 632 class patchfile(object):
632 633 def __init__(self, ui, gp, backend, store, eolmode='strict'):
633 634 self.fname = gp.path
634 635 self.eolmode = eolmode
635 636 self.eol = None
636 637 self.backend = backend
637 638 self.ui = ui
638 639 self.lines = []
639 640 self.exists = False
640 641 self.missing = True
641 642 self.mode = gp.mode
642 643 self.copysource = gp.oldpath
643 644 self.create = gp.op in ('ADD', 'COPY', 'RENAME')
644 645 self.remove = gp.op == 'DELETE'
645 646 if self.copysource is None:
646 647 data, mode = backend.getfile(self.fname)
647 648 else:
648 649 data, mode = store.getfile(self.copysource)[:2]
649 650 if data is not None:
650 651 self.exists = self.copysource is None or backend.exists(self.fname)
651 652 self.missing = False
652 653 if data:
653 654 self.lines = mdiff.splitnewlines(data)
654 655 if self.mode is None:
655 656 self.mode = mode
656 657 if self.lines:
657 658 # Normalize line endings
658 659 if self.lines[0].endswith('\r\n'):
659 660 self.eol = '\r\n'
660 661 elif self.lines[0].endswith('\n'):
661 662 self.eol = '\n'
662 663 if eolmode != 'strict':
663 664 nlines = []
664 665 for l in self.lines:
665 666 if l.endswith('\r\n'):
666 667 l = l[:-2] + '\n'
667 668 nlines.append(l)
668 669 self.lines = nlines
669 670 else:
670 671 if self.create:
671 672 self.missing = False
672 673 if self.mode is None:
673 674 self.mode = (False, False)
674 675 if self.missing:
675 676 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
676 677 self.ui.warn(_("(use '--prefix' to apply patch relative to the "
677 678 "current directory)\n"))
678 679
679 680 self.hash = {}
680 681 self.dirty = 0
681 682 self.offset = 0
682 683 self.skew = 0
683 684 self.rej = []
684 685 self.fileprinted = False
685 686 self.printfile(False)
686 687 self.hunks = 0
687 688
688 689 def writelines(self, fname, lines, mode):
689 690 if self.eolmode == 'auto':
690 691 eol = self.eol
691 692 elif self.eolmode == 'crlf':
692 693 eol = '\r\n'
693 694 else:
694 695 eol = '\n'
695 696
696 697 if self.eolmode != 'strict' and eol and eol != '\n':
697 698 rawlines = []
698 699 for l in lines:
699 700 if l and l[-1] == '\n':
700 701 l = l[:-1] + eol
701 702 rawlines.append(l)
702 703 lines = rawlines
703 704
704 705 self.backend.setfile(fname, ''.join(lines), mode, self.copysource)
705 706
706 707 def printfile(self, warn):
707 708 if self.fileprinted:
708 709 return
709 710 if warn or self.ui.verbose:
710 711 self.fileprinted = True
711 712 s = _("patching file %s\n") % self.fname
712 713 if warn:
713 714 self.ui.warn(s)
714 715 else:
715 716 self.ui.note(s)
716 717
717 718
718 719 def findlines(self, l, linenum):
719 720 # looks through the hash and finds candidate lines. The
720 721 # result is a list of line numbers sorted based on distance
721 722 # from linenum
722 723
723 724 cand = self.hash.get(l, [])
724 725 if len(cand) > 1:
725 726 # sort our list of candidates by distance from linenum, nearest first
726 727 cand.sort(key=lambda x: abs(x - linenum))
727 728 return cand
728 729
729 730 def write_rej(self):
730 731 # our rejects are a little different from patch(1). This always
731 732 # creates rejects in the same form as the original patch. A file
732 733 # header is inserted so that you can run the reject through patch again
733 734 # without having to type the filename.
734 735 if not self.rej:
735 736 return
736 737 base = os.path.basename(self.fname)
737 738 lines = ["--- %s\n+++ %s\n" % (base, base)]
738 739 for x in self.rej:
739 740 for l in x.hunk:
740 741 lines.append(l)
741 742 if l[-1:] != '\n':
742 743 lines.append("\n\ No newline at end of file\n")
743 744 self.backend.writerej(self.fname, len(self.rej), self.hunks, lines)
744 745
745 746 def apply(self, h):
746 747 if not h.complete():
747 748 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
748 749 (h.number, h.desc, len(h.a), h.lena, len(h.b),
749 750 h.lenb))
750 751
751 752 self.hunks += 1
752 753
753 754 if self.missing:
754 755 self.rej.append(h)
755 756 return -1
756 757
757 758 if self.exists and self.create:
758 759 if self.copysource:
759 760 self.ui.warn(_("cannot create %s: destination already "
760 761 "exists\n") % self.fname)
761 762 else:
762 763 self.ui.warn(_("file %s already exists\n") % self.fname)
763 764 self.rej.append(h)
764 765 return -1
765 766
766 767 if isinstance(h, binhunk):
767 768 if self.remove:
768 769 self.backend.unlink(self.fname)
769 770 else:
770 771 l = h.new(self.lines)
771 772 self.lines[:] = l
772 773 self.offset += len(l)
773 774 self.dirty = True
774 775 return 0
775 776
776 777 horig = h
777 778 if (self.eolmode in ('crlf', 'lf')
778 779 or self.eolmode == 'auto' and self.eol):
779 780 # If new eols are going to be normalized, then normalize
780 781 # hunk data before patching. Otherwise, preserve input
781 782 # line-endings.
782 783 h = h.getnormalized()
783 784
784 785 # fast case first, no offsets, no fuzz
785 786 old, oldstart, new, newstart = h.fuzzit(0, False)
786 787 oldstart += self.offset
787 788 orig_start = oldstart
788 789 # if there's skew we want to emit the "(offset %d lines)" even
789 790 # when the hunk cleanly applies at start + skew, so skip the
790 791 # fast case code
791 792 if (self.skew == 0 and
792 793 diffhelpers.testhunk(old, self.lines, oldstart) == 0):
793 794 if self.remove:
794 795 self.backend.unlink(self.fname)
795 796 else:
796 797 self.lines[oldstart:oldstart + len(old)] = new
797 798 self.offset += len(new) - len(old)
798 799 self.dirty = True
799 800 return 0
800 801
801 802 # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
802 803 self.hash = {}
803 804 for x, s in enumerate(self.lines):
804 805 self.hash.setdefault(s, []).append(x)
805 806
806 807 for fuzzlen in xrange(self.ui.configint("patch", "fuzz") + 1):
807 808 for toponly in [True, False]:
808 809 old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
809 810 oldstart = oldstart + self.offset + self.skew
810 811 oldstart = min(oldstart, len(self.lines))
811 812 if old:
812 813 cand = self.findlines(old[0][1:], oldstart)
813 814 else:
814 815 # Only adding lines with no or fuzzed context, just
815 816 # take the skew into account
816 817 cand = [oldstart]
817 818
818 819 for l in cand:
819 820 if not old or diffhelpers.testhunk(old, self.lines, l) == 0:
820 821 self.lines[l : l + len(old)] = new
821 822 self.offset += len(new) - len(old)
822 823 self.skew = l - orig_start
823 824 self.dirty = True
824 825 offset = l - orig_start - fuzzlen
825 826 if fuzzlen:
826 827 msg = _("Hunk #%d succeeded at %d "
827 828 "with fuzz %d "
828 829 "(offset %d lines).\n")
829 830 self.printfile(True)
830 831 self.ui.warn(msg %
831 832 (h.number, l + 1, fuzzlen, offset))
832 833 else:
833 834 msg = _("Hunk #%d succeeded at %d "
834 835 "(offset %d lines).\n")
835 836 self.ui.note(msg % (h.number, l + 1, offset))
836 837 return fuzzlen
837 838 self.printfile(True)
838 839 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
839 840 self.rej.append(horig)
840 841 return -1
841 842
842 843 def close(self):
843 844 if self.dirty:
844 845 self.writelines(self.fname, self.lines, self.mode)
845 846 self.write_rej()
846 847 return len(self.rej)
847 848
848 849 class header(object):
849 850 """patch header
850 851 """
851 852 diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
852 853 diff_re = re.compile('diff -r .* (.*)$')
853 854 allhunks_re = re.compile('(?:index|deleted file) ')
854 855 pretty_re = re.compile('(?:new file|deleted file) ')
855 856 special_re = re.compile('(?:index|deleted|copy|rename) ')
856 857 newfile_re = re.compile('(?:new file)')
857 858
858 859 def __init__(self, header):
859 860 self.header = header
860 861 self.hunks = []
861 862
862 863 def binary(self):
863 864 return any(h.startswith('index ') for h in self.header)
864 865
865 866 def pretty(self, fp):
866 867 for h in self.header:
867 868 if h.startswith('index '):
868 869 fp.write(_('this modifies a binary file (all or nothing)\n'))
869 870 break
870 871 if self.pretty_re.match(h):
871 872 fp.write(h)
872 873 if self.binary():
873 874 fp.write(_('this is a binary file\n'))
874 875 break
875 876 if h.startswith('---'):
876 877 fp.write(_('%d hunks, %d lines changed\n') %
877 878 (len(self.hunks),
878 879 sum([max(h.added, h.removed) for h in self.hunks])))
879 880 break
880 881 fp.write(h)
881 882
882 883 def write(self, fp):
883 884 fp.write(''.join(self.header))
884 885
885 886 def allhunks(self):
886 887 return any(self.allhunks_re.match(h) for h in self.header)
887 888
888 889 def files(self):
889 890 match = self.diffgit_re.match(self.header[0])
890 891 if match:
891 892 fromfile, tofile = match.groups()
892 893 if fromfile == tofile:
893 894 return [fromfile]
894 895 return [fromfile, tofile]
895 896 else:
896 897 return self.diff_re.match(self.header[0]).groups()
897 898
898 899 def filename(self):
899 900 return self.files()[-1]
900 901
901 902 def __repr__(self):
902 903 return '<header %s>' % (' '.join(map(repr, self.files())))
903 904
904 905 def isnewfile(self):
905 906 return any(self.newfile_re.match(h) for h in self.header)
906 907
907 908 def special(self):
908 909 # Special files are shown only at the header level and not at the hunk
909 910 # level; for example, a file that has been deleted is a special file.
910 911 # The user cannot change the content of the operation: in the case of
911 912 # a deleted file they have to take the deletion or leave it, they
912 913 # cannot take only part of it.
913 914 # Newly added files are special if they are empty; they are not special
914 915 # if they have some content, as we want to be able to change it.
915 916 nocontent = len(self.header) == 2
916 917 emptynewfile = self.isnewfile() and nocontent
917 918 return emptynewfile or \
918 919 any(self.special_re.match(h) for h in self.header)
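# Illustrative example (annotation): for a git-style header the filename
# comes from the 'diff --git' line, e.g.
#
#     h = header(['diff --git a/foo.c b/foo.c\n', '--- a/foo.c\n',
#                 '+++ b/foo.c\n'])
#     h.files()      -> ['foo.c']
#     h.filename()   -> 'foo.c'
#     h.isnewfile()  -> False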
919 920
920 921 class recordhunk(object):
921 922 """patch hunk
922 923
923 924 XXX shouldn't we merge this with the other hunk class?
924 925 """
925 926
926 927 def __init__(self, header, fromline, toline, proc, before, hunk, after,
927 928 maxcontext=None):
928 929 def trimcontext(lines, reverse=False):
929 930 if maxcontext is not None:
930 931 delta = len(lines) - maxcontext
931 932 if delta > 0:
932 933 if reverse:
933 934 return delta, lines[delta:]
934 935 else:
935 936 return delta, lines[:maxcontext]
936 937 return 0, lines
937 938
938 939 self.header = header
939 940 trimedbefore, self.before = trimcontext(before, True)
940 941 self.fromline = fromline + trimedbefore
941 942 self.toline = toline + trimedbefore
942 943 _trimedafter, self.after = trimcontext(after, False)
943 944 self.proc = proc
944 945 self.hunk = hunk
945 946 self.added, self.removed = self.countchanges(self.hunk)
946 947
947 948 def __eq__(self, v):
948 949 if not isinstance(v, recordhunk):
949 950 return False
950 951
951 952 return ((v.hunk == self.hunk) and
952 953 (v.proc == self.proc) and
953 954 (self.fromline == v.fromline) and
954 955 (self.header.files() == v.header.files()))
955 956
956 957 def __hash__(self):
957 958 return hash((tuple(self.hunk),
958 959 tuple(self.header.files()),
959 960 self.fromline,
960 961 self.proc))
961 962
962 963 def countchanges(self, hunk):
963 964 """hunk -> (n+,n-)"""
964 965 add = len([h for h in hunk if h.startswith('+')])
965 966 rem = len([h for h in hunk if h.startswith('-')])
966 967 return add, rem
967 968
968 969 def reversehunk(self):
969 970 """return another recordhunk which is the reverse of the hunk
970 971
971 972 If this hunk is diff(A, B), the returned hunk is diff(B, A). To do
972 973 that, swap fromline/toline and +/- signs while keeping other things
973 974 unchanged.
974 975 """
975 976 m = {'+': '-', '-': '+', '\\': '\\'}
976 977 hunk = ['%s%s' % (m[l[0:1]], l[1:]) for l in self.hunk]
977 978 return recordhunk(self.header, self.toline, self.fromline, self.proc,
978 979 self.before, hunk, self.after)
979 980
980 981 def write(self, fp):
981 982 delta = len(self.before) + len(self.after)
982 983 if self.after and self.after[-1] == '\\ No newline at end of file\n':
983 984 delta -= 1
984 985 fromlen = delta + self.removed
985 986 tolen = delta + self.added
986 987 fp.write('@@ -%d,%d +%d,%d @@%s\n' %
987 988 (self.fromline, fromlen, self.toline, tolen,
988 989 self.proc and (' ' + self.proc)))
989 990 fp.write(''.join(self.before + self.hunk + self.after))
990 991
991 992 pretty = write
992 993
993 994 def filename(self):
994 995 return self.header.filename()
995 996
996 997 def __repr__(self):
997 998 return '<hunk %r@%d>' % (self.filename(), self.fromline)
998 999
999 1000 def getmessages():
1000 1001 return {
1001 1002 'multiple': {
1002 1003 'apply': _("apply change %d/%d to '%s'?"),
1003 1004 'discard': _("discard change %d/%d to '%s'?"),
1004 1005 'record': _("record change %d/%d to '%s'?"),
1005 1006 },
1006 1007 'single': {
1007 1008 'apply': _("apply this change to '%s'?"),
1008 1009 'discard': _("discard this change to '%s'?"),
1009 1010 'record': _("record this change to '%s'?"),
1010 1011 },
1011 1012 'help': {
1012 1013 'apply': _('[Ynesfdaq?]'
1013 1014 '$$ &Yes, apply this change'
1014 1015 '$$ &No, skip this change'
1015 1016 '$$ &Edit this change manually'
1016 1017 '$$ &Skip remaining changes to this file'
1017 1018 '$$ Apply remaining changes to this &file'
1018 1019 '$$ &Done, skip remaining changes and files'
1019 1020 '$$ Apply &all changes to all remaining files'
1020 1021 '$$ &Quit, applying no changes'
1021 1022 '$$ &? (display help)'),
1022 1023 'discard': _('[Ynesfdaq?]'
1023 1024 '$$ &Yes, discard this change'
1024 1025 '$$ &No, skip this change'
1025 1026 '$$ &Edit this change manually'
1026 1027 '$$ &Skip remaining changes to this file'
1027 1028 '$$ Discard remaining changes to this &file'
1028 1029 '$$ &Done, skip remaining changes and files'
1029 1030 '$$ Discard &all changes to all remaining files'
1030 1031 '$$ &Quit, discarding no changes'
1031 1032 '$$ &? (display help)'),
1032 1033 'record': _('[Ynesfdaq?]'
1033 1034 '$$ &Yes, record this change'
1034 1035 '$$ &No, skip this change'
1035 1036 '$$ &Edit this change manually'
1036 1037 '$$ &Skip remaining changes to this file'
1037 1038 '$$ Record remaining changes to this &file'
1038 1039 '$$ &Done, skip remaining changes and files'
1039 1040 '$$ Record &all changes to all remaining files'
1040 1041 '$$ &Quit, recording no changes'
1041 1042 '$$ &? (display help)'),
1042 1043 }
1043 1044 }
1044 1045
1045 1046 def filterpatch(ui, headers, operation=None):
1046 1047 """Interactively filter patch chunks into applied-only chunks"""
1047 1048 messages = getmessages()
1048 1049
1049 1050 if operation is None:
1050 1051 operation = 'record'
1051 1052
1052 1053 def prompt(skipfile, skipall, query, chunk):
1053 1054 """prompt query, and process base inputs
1054 1055
1055 1056 - y/n for the rest of file
1056 1057 - y/n for the rest
1057 1058 - ? (help)
1058 1059 - q (quit)
1059 1060
1060 1061 Return True/False and possibly updated skipfile and skipall.
1061 1062 """
1062 1063 newpatches = None
1063 1064 if skipall is not None:
1064 1065 return skipall, skipfile, skipall, newpatches
1065 1066 if skipfile is not None:
1066 1067 return skipfile, skipfile, skipall, newpatches
1067 1068 while True:
1068 1069 resps = messages['help'][operation]
1069 1070 r = ui.promptchoice("%s %s" % (query, resps))
1070 1071 ui.write("\n")
1071 1072 if r == 8: # ?
1072 1073 for c, t in ui.extractchoices(resps)[1]:
1073 1074 ui.write('%s - %s\n' % (c, encoding.lower(t)))
1074 1075 continue
1075 1076 elif r == 0: # yes
1076 1077 ret = True
1077 1078 elif r == 1: # no
1078 1079 ret = False
1079 1080 elif r == 2: # Edit patch
1080 1081 if chunk is None:
1081 1082 ui.write(_('cannot edit patch for whole file'))
1082 1083 ui.write("\n")
1083 1084 continue
1084 1085 if chunk.header.binary():
1085 1086 ui.write(_('cannot edit patch for binary file'))
1086 1087 ui.write("\n")
1087 1088 continue
1088 1089 # Patch comment based on the Git one (based on comment at end of
1089 1090 # https://mercurial-scm.org/wiki/RecordExtension)
1090 1091 phelp = '---' + _("""
1091 1092 To remove '-' lines, make them ' ' lines (context).
1092 1093 To remove '+' lines, delete them.
1093 1094 Lines starting with # will be removed from the patch.
1094 1095
1095 1096 If the patch applies cleanly, the edited hunk will immediately be
1096 1097 added to the record list. If it does not apply cleanly, a rejects
1097 1098 file will be generated: you can use that when you try again. If
1098 1099 all lines of the hunk are removed, then the edit is aborted and
1099 1100 the hunk is left unchanged.
1100 1101 """)
1101 1102 (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-",
1102 1103 suffix=".diff", text=True)
1103 1104 ncpatchfp = None
1104 1105 try:
1105 1106 # Write the initial patch
1106 1107 f = os.fdopen(patchfd, pycompat.sysstr("w"))
1107 1108 chunk.header.write(f)
1108 1109 chunk.write(f)
1109 1110 f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
1110 1111 f.close()
1111 1112 # Start the editor and wait for it to complete
1112 1113 editor = ui.geteditor()
1113 1114 ret = ui.system("%s \"%s\"" % (editor, patchfn),
1114 1115 environ={'HGUSER': ui.username()},
1115 1116 blockedtag='filterpatch')
1116 1117 if ret != 0:
1117 1118 ui.warn(_("editor exited with exit code %d\n") % ret)
1118 1119 continue
1119 1120 # Remove comment lines
1120 1121 patchfp = open(patchfn)
1121 1122 ncpatchfp = stringio()
1122 1123 for line in util.iterfile(patchfp):
1123 1124 if not line.startswith('#'):
1124 1125 ncpatchfp.write(line)
1125 1126 patchfp.close()
1126 1127 ncpatchfp.seek(0)
1127 1128 newpatches = parsepatch(ncpatchfp)
1128 1129 finally:
1129 1130 os.unlink(patchfn)
1130 1131 del ncpatchfp
1131 1132 # Signal that the chunk shouldn't be applied as-is, but
1132 1133 # provide the new patch to be used instead.
1133 1134 ret = False
1134 1135 elif r == 3: # Skip
1135 1136 ret = skipfile = False
1136 1137 elif r == 4: # file (Record remaining)
1137 1138 ret = skipfile = True
1138 1139 elif r == 5: # done, skip remaining
1139 1140 ret = skipall = False
1140 1141 elif r == 6: # all
1141 1142 ret = skipall = True
1142 1143 elif r == 7: # quit
1143 1144 raise error.Abort(_('user quit'))
1144 1145 return ret, skipfile, skipall, newpatches
1145 1146
1146 1147 seen = set()
1147 1148 applied = {} # 'filename' -> [] of chunks
1148 1149 skipfile, skipall = None, None
1149 1150 pos, total = 1, sum(len(h.hunks) for h in headers)
1150 1151 for h in headers:
1151 1152 pos += len(h.hunks)
1152 1153 skipfile = None
1153 1154 fixoffset = 0
1154 1155 hdr = ''.join(h.header)
1155 1156 if hdr in seen:
1156 1157 continue
1157 1158 seen.add(hdr)
1158 1159 if skipall is None:
1159 1160 h.pretty(ui)
1160 1161 msg = (_('examine changes to %s?') %
1161 1162 _(' and ').join("'%s'" % f for f in h.files()))
1162 1163 r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
1163 1164 if not r:
1164 1165 continue
1165 1166 applied[h.filename()] = [h]
1166 1167 if h.allhunks():
1167 1168 applied[h.filename()] += h.hunks
1168 1169 continue
1169 1170 for i, chunk in enumerate(h.hunks):
1170 1171 if skipfile is None and skipall is None:
1171 1172 chunk.pretty(ui)
1172 1173 if total == 1:
1173 1174 msg = messages['single'][operation] % chunk.filename()
1174 1175 else:
1175 1176 idx = pos - len(h.hunks) + i
1176 1177 msg = messages['multiple'][operation] % (idx, total,
1177 1178 chunk.filename())
1178 1179 r, skipfile, skipall, newpatches = prompt(skipfile,
1179 1180 skipall, msg, chunk)
1180 1181 if r:
1181 1182 if fixoffset:
1182 1183 chunk = copy.copy(chunk)
1183 1184 chunk.toline += fixoffset
1184 1185 applied[chunk.filename()].append(chunk)
1185 1186 elif newpatches is not None:
1186 1187 for newpatch in newpatches:
1187 1188 for newhunk in newpatch.hunks:
1188 1189 if fixoffset:
1189 1190 newhunk.toline += fixoffset
1190 1191 applied[newhunk.filename()].append(newhunk)
1191 1192 else:
1192 1193 fixoffset += chunk.removed - chunk.added
1193 1194 return (sum([h for h in applied.itervalues()
1194 1195 if h[0].special() or len(h) > 1], []), {})
1195 1196 class hunk(object):
1196 1197 def __init__(self, desc, num, lr, context):
1197 1198 self.number = num
1198 1199 self.desc = desc
1199 1200 self.hunk = [desc]
1200 1201 self.a = []
1201 1202 self.b = []
1202 1203 self.starta = self.lena = None
1203 1204 self.startb = self.lenb = None
1204 1205 if lr is not None:
1205 1206 if context:
1206 1207 self.read_context_hunk(lr)
1207 1208 else:
1208 1209 self.read_unified_hunk(lr)
1209 1210
1210 1211 def getnormalized(self):
1211 1212 """Return a copy with line endings normalized to LF."""
1212 1213
1213 1214 def normalize(lines):
1214 1215 nlines = []
1215 1216 for line in lines:
1216 1217 if line.endswith('\r\n'):
1217 1218 line = line[:-2] + '\n'
1218 1219 nlines.append(line)
1219 1220 return nlines
1220 1221
1221 1222 # Dummy object, it is rebuilt manually
1222 1223 nh = hunk(self.desc, self.number, None, None)
1223 1224 nh.number = self.number
1224 1225 nh.desc = self.desc
1225 1226 nh.hunk = self.hunk
1226 1227 nh.a = normalize(self.a)
1227 1228 nh.b = normalize(self.b)
1228 1229 nh.starta = self.starta
1229 1230 nh.startb = self.startb
1230 1231 nh.lena = self.lena
1231 1232 nh.lenb = self.lenb
1232 1233 return nh
1233 1234
1234 1235 def read_unified_hunk(self, lr):
1235 1236 m = unidesc.match(self.desc)
1236 1237 if not m:
1237 1238 raise PatchError(_("bad hunk #%d") % self.number)
1238 1239 self.starta, self.lena, self.startb, self.lenb = m.groups()
1239 1240 if self.lena is None:
1240 1241 self.lena = 1
1241 1242 else:
1242 1243 self.lena = int(self.lena)
1243 1244 if self.lenb is None:
1244 1245 self.lenb = 1
1245 1246 else:
1246 1247 self.lenb = int(self.lenb)
1247 1248 self.starta = int(self.starta)
1248 1249 self.startb = int(self.startb)
1249 1250 diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a,
1250 1251 self.b)
1251 1252 # if we hit eof before finishing out the hunk, the last line will
1252 1253 # be zero length. Let's try to fix it up.
1253 1254 while len(self.hunk[-1]) == 0:
1254 1255 del self.hunk[-1]
1255 1256 del self.a[-1]
1256 1257 del self.b[-1]
1257 1258 self.lena -= 1
1258 1259 self.lenb -= 1
1259 1260 self._fixnewline(lr)
1260 1261
1261 1262 def read_context_hunk(self, lr):
1262 1263 self.desc = lr.readline()
1263 1264 m = contextdesc.match(self.desc)
1264 1265 if not m:
1265 1266 raise PatchError(_("bad hunk #%d") % self.number)
1266 1267 self.starta, aend = m.groups()
1267 1268 self.starta = int(self.starta)
1268 1269 if aend is None:
1269 1270 aend = self.starta
1270 1271 self.lena = int(aend) - self.starta
1271 1272 if self.starta:
1272 1273 self.lena += 1
1273 1274 for x in xrange(self.lena):
1274 1275 l = lr.readline()
1275 1276 if l.startswith('---'):
1276 1277 # lines addition, old block is empty
1277 1278 lr.push(l)
1278 1279 break
1279 1280 s = l[2:]
1280 1281 if l.startswith('- ') or l.startswith('! '):
1281 1282 u = '-' + s
1282 1283 elif l.startswith(' '):
1283 1284 u = ' ' + s
1284 1285 else:
1285 1286 raise PatchError(_("bad hunk #%d old text line %d") %
1286 1287 (self.number, x))
1287 1288 self.a.append(u)
1288 1289 self.hunk.append(u)
1289 1290
1290 1291 l = lr.readline()
1291 1292 if l.startswith('\ '):
1292 1293 s = self.a[-1][:-1]
1293 1294 self.a[-1] = s
1294 1295 self.hunk[-1] = s
1295 1296 l = lr.readline()
1296 1297 m = contextdesc.match(l)
1297 1298 if not m:
1298 1299 raise PatchError(_("bad hunk #%d") % self.number)
1299 1300 self.startb, bend = m.groups()
1300 1301 self.startb = int(self.startb)
1301 1302 if bend is None:
1302 1303 bend = self.startb
1303 1304 self.lenb = int(bend) - self.startb
1304 1305 if self.startb:
1305 1306 self.lenb += 1
1306 1307 hunki = 1
1307 1308 for x in xrange(self.lenb):
1308 1309 l = lr.readline()
1309 1310 if l.startswith('\ '):
1310 1311 # XXX: the only way to hit this is with an invalid line range.
1311 1312 # The no-eol marker is not counted in the line range, but I
1312 1313 # guess there are diff(1) implementations out there which behave differently.
1313 1314 s = self.b[-1][:-1]
1314 1315 self.b[-1] = s
1315 1316 self.hunk[hunki - 1] = s
1316 1317 continue
1317 1318 if not l:
1318 1319 # line deletions, new block is empty and we hit EOF
1319 1320 lr.push(l)
1320 1321 break
1321 1322 s = l[2:]
1322 1323 if l.startswith('+ ') or l.startswith('! '):
1323 1324 u = '+' + s
1324 1325 elif l.startswith(' '):
1325 1326 u = ' ' + s
1326 1327 elif len(self.b) == 0:
1327 1328 # line deletions, new block is empty
1328 1329 lr.push(l)
1329 1330 break
1330 1331 else:
1331 1332 raise PatchError(_("bad hunk #%d old text line %d") %
1332 1333 (self.number, x))
1333 1334 self.b.append(s)
1334 1335 while True:
1335 1336 if hunki >= len(self.hunk):
1336 1337 h = ""
1337 1338 else:
1338 1339 h = self.hunk[hunki]
1339 1340 hunki += 1
1340 1341 if h == u:
1341 1342 break
1342 1343 elif h.startswith('-'):
1343 1344 continue
1344 1345 else:
1345 1346 self.hunk.insert(hunki - 1, u)
1346 1347 break
1347 1348
1348 1349 if not self.a:
1349 1350 # this happens when lines were only added to the hunk
1350 1351 for x in self.hunk:
1351 1352 if x.startswith('-') or x.startswith(' '):
1352 1353 self.a.append(x)
1353 1354 if not self.b:
1354 1355 # this happens when lines were only deleted from the hunk
1355 1356 for x in self.hunk:
1356 1357 if x.startswith('+') or x.startswith(' '):
1357 1358 self.b.append(x[1:])
1358 1359 # @@ -start,len +start,len @@
1359 1360 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
1360 1361 self.startb, self.lenb)
1361 1362 self.hunk[0] = self.desc
1362 1363 self._fixnewline(lr)
1363 1364
1364 1365 def _fixnewline(self, lr):
1365 1366 l = lr.readline()
1366 1367 if l.startswith('\ '):
1367 1368 diffhelpers.fix_newline(self.hunk, self.a, self.b)
1368 1369 else:
1369 1370 lr.push(l)
1370 1371
1371 1372 def complete(self):
1372 1373 return len(self.a) == self.lena and len(self.b) == self.lenb
1373 1374
1374 1375 def _fuzzit(self, old, new, fuzz, toponly):
1375 1376 # this removes context lines from the top and bottom of the 'old' and
1376 1377 # 'new' lists. It checks the hunk to make sure only context lines are
1377 1378 # removed, and then returns the shortened lists and the top trim count.
1378 1379 fuzz = min(fuzz, len(old))
1379 1380 if fuzz:
1380 1381 top = 0
1381 1382 bot = 0
1382 1383 hlen = len(self.hunk)
1383 1384 for x in xrange(hlen - 1):
1384 1385 # the hunk starts with the @@ line, so use x+1
1385 1386 if self.hunk[x + 1][0] == ' ':
1386 1387 top += 1
1387 1388 else:
1388 1389 break
1389 1390 if not toponly:
1390 1391 for x in xrange(hlen - 1):
1391 1392 if self.hunk[hlen - bot - 1][0] == ' ':
1392 1393 bot += 1
1393 1394 else:
1394 1395 break
1395 1396
1396 1397 bot = min(fuzz, bot)
1397 1398 top = min(fuzz, top)
1398 1399 return old[top:len(old) - bot], new[top:len(new) - bot], top
1399 1400 return old, new, 0
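# Worked example (annotation, not part of the module): for a unified hunk
# with lines ' ctx1', '-old', '+new', ' ctx2', self.a is
# [' ctx1\n', '-old\n', ' ctx2\n'] and self.b is
# ['ctx1\n', 'new\n', 'ctx2\n'].  _fuzzit(self.a, self.b, 1, False) strips
# one context line from each end and returns (['-old\n'], ['new\n'], 1),
# the trailing 1 being the number of lines trimmed from the top, which
# fuzzit() below uses to shift the hunk's start line.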
1400 1401
1401 1402 def fuzzit(self, fuzz, toponly):
1402 1403 old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
1403 1404 oldstart = self.starta + top
1404 1405 newstart = self.startb + top
1405 1406 # zero length hunk ranges already have their start decremented
1406 1407 if self.lena and oldstart > 0:
1407 1408 oldstart -= 1
1408 1409 if self.lenb and newstart > 0:
1409 1410 newstart -= 1
1410 1411 return old, oldstart, new, newstart
1411 1412
1412 1413 class binhunk(object):
1413 1414 'A binary patch file.'
1414 1415 def __init__(self, lr, fname):
1415 1416 self.text = None
1416 1417 self.delta = False
1417 1418 self.hunk = ['GIT binary patch\n']
1418 1419 self._fname = fname
1419 1420 self._read(lr)
1420 1421
1421 1422 def complete(self):
1422 1423 return self.text is not None
1423 1424
1424 1425 def new(self, lines):
1425 1426 if self.delta:
1426 1427 return [applybindelta(self.text, ''.join(lines))]
1427 1428 return [self.text]
1428 1429
1429 1430 def _read(self, lr):
1430 1431 def getline(lr, hunk):
1431 1432 l = lr.readline()
1432 1433 hunk.append(l)
1433 1434 return l.rstrip('\r\n')
1434 1435
1435 1436 size = 0
1436 1437 while True:
1437 1438 line = getline(lr, self.hunk)
1438 1439 if not line:
1439 1440 raise PatchError(_('could not extract "%s" binary data')
1440 1441 % self._fname)
1441 1442 if line.startswith('literal '):
1442 1443 size = int(line[8:].rstrip())
1443 1444 break
1444 1445 if line.startswith('delta '):
1445 1446 size = int(line[6:].rstrip())
1446 1447 self.delta = True
1447 1448 break
1448 1449 dec = []
1449 1450 line = getline(lr, self.hunk)
1450 1451 while len(line) > 1:
1451 1452 l = line[0]
1452 1453 if l <= 'Z' and l >= 'A':
1453 1454 l = ord(l) - ord('A') + 1
1454 1455 else:
1455 1456 l = ord(l) - ord('a') + 27
1456 1457 try:
1457 1458 dec.append(util.b85decode(line[1:])[:l])
1458 1459 except ValueError as e:
1459 1460 raise PatchError(_('could not decode "%s" binary patch: %s')
1460 1461 % (self._fname, str(e)))
1461 1462 line = getline(lr, self.hunk)
1462 1463 text = zlib.decompress(''.join(dec))
1463 1464 if len(text) != size:
1464 1465 raise PatchError(_('"%s" length is %d bytes, should be %d')
1465 1466 % (self._fname, len(text), size))
1466 1467 self.text = text
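# Illustrative note (annotation): in git binary patches each data line
# starts with a single character encoding how many bytes the line holds
# once base85-decoded: 'A'..'Z' mean 1..26 bytes and 'a'..'z' mean
# 27..52 bytes, which is what the ord() arithmetic above computes
# (e.g. a line starting with 'M' carries 13 bytes).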
1467 1468
1468 1469 def parsefilename(str):
1469 1470 # --- filename \t|space stuff
1470 1471 s = str[4:].rstrip('\r\n')
1471 1472 i = s.find('\t')
1472 1473 if i < 0:
1473 1474 i = s.find(' ')
1474 1475 if i < 0:
1475 1476 return s
1476 1477 return s[:i]
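# Illustrative examples (annotation):
#
#     parsefilename('--- a/foo.c\t2017-12-06 10:00:00\n')  -> 'a/foo.c'
#     parsefilename('+++ b/foo.c\n')                       -> 'b/foo.c'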
1477 1478
1478 1479 def reversehunks(hunks):
1479 1480 '''reverse the signs in the hunks given as argument
1480 1481
1481 1482 This function operates on hunks coming out of patch.filterpatch, that is
1482 1483 a list of the form: [header1, hunk1, hunk2, header2...]. Example usage:
1483 1484
1484 1485 >>> rawpatch = b"""diff --git a/folder1/g b/folder1/g
1485 1486 ... --- a/folder1/g
1486 1487 ... +++ b/folder1/g
1487 1488 ... @@ -1,7 +1,7 @@
1488 1489 ... +firstline
1489 1490 ... c
1490 1491 ... 1
1491 1492 ... 2
1492 1493 ... + 3
1493 1494 ... -4
1494 1495 ... 5
1495 1496 ... d
1496 1497 ... +lastline"""
1497 1498 >>> hunks = parsepatch([rawpatch])
1498 1499 >>> hunkscomingfromfilterpatch = []
1499 1500 >>> for h in hunks:
1500 1501 ... hunkscomingfromfilterpatch.append(h)
1501 1502 ... hunkscomingfromfilterpatch.extend(h.hunks)
1502 1503
1503 1504 >>> reversedhunks = reversehunks(hunkscomingfromfilterpatch)
1504 1505 >>> from . import util
1505 1506 >>> fp = util.stringio()
1506 1507 >>> for c in reversedhunks:
1507 1508 ... c.write(fp)
1508 1509 >>> fp.seek(0) or None
1509 1510 >>> reversedpatch = fp.read()
1510 1511 >>> print(pycompat.sysstr(reversedpatch))
1511 1512 diff --git a/folder1/g b/folder1/g
1512 1513 --- a/folder1/g
1513 1514 +++ b/folder1/g
1514 1515 @@ -1,4 +1,3 @@
1515 1516 -firstline
1516 1517 c
1517 1518 1
1518 1519 2
1519 1520 @@ -2,6 +1,6 @@
1520 1521 c
1521 1522 1
1522 1523 2
1523 1524 - 3
1524 1525 +4
1525 1526 5
1526 1527 d
1527 1528 @@ -6,3 +5,2 @@
1528 1529 5
1529 1530 d
1530 1531 -lastline
1531 1532
1532 1533 '''
1533 1534
1534 1535 newhunks = []
1535 1536 for c in hunks:
1536 1537 if util.safehasattr(c, 'reversehunk'):
1537 1538 c = c.reversehunk()
1538 1539 newhunks.append(c)
1539 1540 return newhunks
1540 1541
1541 1542 def parsepatch(originalchunks, maxcontext=None):
1542 1543 """patch -> [] of headers -> [] of hunks
1543 1544
1544 1545 If maxcontext is not None, trim context lines if necessary.
1545 1546
1546 1547 >>> rawpatch = b'''diff --git a/folder1/g b/folder1/g
1547 1548 ... --- a/folder1/g
1548 1549 ... +++ b/folder1/g
1549 1550 ... @@ -1,8 +1,10 @@
1550 1551 ... 1
1551 1552 ... 2
1552 1553 ... -3
1553 1554 ... 4
1554 1555 ... 5
1555 1556 ... 6
1556 1557 ... +6.1
1557 1558 ... +6.2
1558 1559 ... 7
1559 1560 ... 8
1560 1561 ... +9'''
1561 1562 >>> out = util.stringio()
1562 1563 >>> headers = parsepatch([rawpatch], maxcontext=1)
1563 1564 >>> for header in headers:
1564 1565 ... header.write(out)
1565 1566 ... for hunk in header.hunks:
1566 1567 ... hunk.write(out)
1567 1568 >>> print(pycompat.sysstr(out.getvalue()))
1568 1569 diff --git a/folder1/g b/folder1/g
1569 1570 --- a/folder1/g
1570 1571 +++ b/folder1/g
1571 1572 @@ -2,3 +2,2 @@
1572 1573 2
1573 1574 -3
1574 1575 4
1575 1576 @@ -6,2 +5,4 @@
1576 1577 6
1577 1578 +6.1
1578 1579 +6.2
1579 1580 7
1580 1581 @@ -8,1 +9,2 @@
1581 1582 8
1582 1583 +9
1583 1584 """
1584 1585 class parser(object):
1585 1586 """patch parsing state machine"""
1586 1587 def __init__(self):
1587 1588 self.fromline = 0
1588 1589 self.toline = 0
1589 1590 self.proc = ''
1590 1591 self.header = None
1591 1592 self.context = []
1592 1593 self.before = []
1593 1594 self.hunk = []
1594 1595 self.headers = []
1595 1596
1596 1597 def addrange(self, limits):
1597 1598 fromstart, fromend, tostart, toend, proc = limits
1598 1599 self.fromline = int(fromstart)
1599 1600 self.toline = int(tostart)
1600 1601 self.proc = proc
1601 1602
1602 1603 def addcontext(self, context):
1603 1604 if self.hunk:
1604 1605 h = recordhunk(self.header, self.fromline, self.toline,
1605 1606 self.proc, self.before, self.hunk, context, maxcontext)
1606 1607 self.header.hunks.append(h)
1607 1608 self.fromline += len(self.before) + h.removed
1608 1609 self.toline += len(self.before) + h.added
1609 1610 self.before = []
1610 1611 self.hunk = []
1611 1612 self.context = context
1612 1613
1613 1614 def addhunk(self, hunk):
1614 1615 if self.context:
1615 1616 self.before = self.context
1616 1617 self.context = []
1617 1618 self.hunk = hunk
1618 1619
1619 1620 def newfile(self, hdr):
1620 1621 self.addcontext([])
1621 1622 h = header(hdr)
1622 1623 self.headers.append(h)
1623 1624 self.header = h
1624 1625
1625 1626 def addother(self, line):
1626 1627 pass # 'other' lines are ignored
1627 1628
1628 1629 def finished(self):
1629 1630 self.addcontext([])
1630 1631 return self.headers
1631 1632
1632 1633 transitions = {
1633 1634 'file': {'context': addcontext,
1634 1635 'file': newfile,
1635 1636 'hunk': addhunk,
1636 1637 'range': addrange},
1637 1638 'context': {'file': newfile,
1638 1639 'hunk': addhunk,
1639 1640 'range': addrange,
1640 1641 'other': addother},
1641 1642 'hunk': {'context': addcontext,
1642 1643 'file': newfile,
1643 1644 'range': addrange},
1644 1645 'range': {'context': addcontext,
1645 1646 'hunk': addhunk},
1646 1647 'other': {'other': addother},
1647 1648 }
1648 1649
1649 1650 p = parser()
1650 1651 fp = stringio()
1651 1652 fp.write(''.join(originalchunks))
1652 1653 fp.seek(0)
1653 1654
1654 1655 state = 'context'
1655 1656 for newstate, data in scanpatch(fp):
1656 1657 try:
1657 1658 p.transitions[state][newstate](p, data)
1658 1659 except KeyError:
1659 1660 raise PatchError('unhandled transition: %s -> %s' %
1660 1661 (state, newstate))
1661 1662 state = newstate
1662 1663 del fp
1663 1664 return p.finished()
1664 1665
1665 1666 def pathtransform(path, strip, prefix):
1666 1667 '''turn a path from a patch into a path suitable for the repository
1667 1668
1668 1669 prefix, if not empty, is expected to be normalized with a / at the end.
1669 1670
1670 1671 Returns (stripped components, path in repository).
1671 1672
1672 1673 >>> pathtransform(b'a/b/c', 0, b'')
1673 1674 ('', 'a/b/c')
1674 1675 >>> pathtransform(b' a/b/c ', 0, b'')
1675 1676 ('', ' a/b/c')
1676 1677 >>> pathtransform(b' a/b/c ', 2, b'')
1677 1678 ('a/b/', 'c')
1678 1679 >>> pathtransform(b'a/b/c', 0, b'd/e/')
1679 1680 ('', 'd/e/a/b/c')
1680 1681 >>> pathtransform(b' a//b/c ', 2, b'd/e/')
1681 1682 ('a//b/', 'd/e/c')
1682 1683 >>> pathtransform(b'a/b/c', 3, b'')
1683 1684 Traceback (most recent call last):
1684 1685 PatchError: unable to strip away 1 of 3 dirs from a/b/c
1685 1686 '''
1686 1687 pathlen = len(path)
1687 1688 i = 0
1688 1689 if strip == 0:
1689 1690 return '', prefix + path.rstrip()
1690 1691 count = strip
1691 1692 while count > 0:
1692 1693 i = path.find('/', i)
1693 1694 if i == -1:
1694 1695 raise PatchError(_("unable to strip away %d of %d dirs from %s") %
1695 1696 (count, strip, path))
1696 1697 i += 1
1697 1698 # consume '//' in the path
1698 1699 while i < pathlen - 1 and path[i:i + 1] == '/':
1699 1700 i += 1
1700 1701 count -= 1
1701 1702 return path[:i].lstrip(), prefix + path[i:].rstrip()
1702 1703
1703 1704 def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix):
1704 1705 nulla = afile_orig == "/dev/null"
1705 1706 nullb = bfile_orig == "/dev/null"
1706 1707 create = nulla and hunk.starta == 0 and hunk.lena == 0
1707 1708 remove = nullb and hunk.startb == 0 and hunk.lenb == 0
1708 1709 abase, afile = pathtransform(afile_orig, strip, prefix)
1709 1710 gooda = not nulla and backend.exists(afile)
1710 1711 bbase, bfile = pathtransform(bfile_orig, strip, prefix)
1711 1712 if afile == bfile:
1712 1713 goodb = gooda
1713 1714 else:
1714 1715 goodb = not nullb and backend.exists(bfile)
1715 1716 missing = not goodb and not gooda and not create
1716 1717
1717 1718 # some diff programs apparently produce patches where the afile is
1718 1719 # not /dev/null, but afile starts with bfile
1719 1720 abasedir = afile[:afile.rfind('/') + 1]
1720 1721 bbasedir = bfile[:bfile.rfind('/') + 1]
1721 1722 if (missing and abasedir == bbasedir and afile.startswith(bfile)
1722 1723 and hunk.starta == 0 and hunk.lena == 0):
1723 1724 create = True
1724 1725 missing = False
1725 1726
1726 1727 # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
1727 1728 # diff is between a file and its backup. In this case, the original
1728 1729 # file should be patched (see original mpatch code).
1729 1730 isbackup = (abase == bbase and bfile.startswith(afile))
1730 1731 fname = None
1731 1732 if not missing:
1732 1733 if gooda and goodb:
1733 1734 if isbackup:
1734 1735 fname = afile
1735 1736 else:
1736 1737 fname = bfile
1737 1738 elif gooda:
1738 1739 fname = afile
1739 1740
1740 1741 if not fname:
1741 1742 if not nullb:
1742 1743 if isbackup:
1743 1744 fname = afile
1744 1745 else:
1745 1746 fname = bfile
1746 1747 elif not nulla:
1747 1748 fname = afile
1748 1749 else:
1749 1750 raise PatchError(_("undefined source and destination files"))
1750 1751
1751 1752 gp = patchmeta(fname)
1752 1753 if create:
1753 1754 gp.op = 'ADD'
1754 1755 elif remove:
1755 1756 gp.op = 'DELETE'
1756 1757 return gp
1757 1758
1758 1759 def scanpatch(fp):
1759 1760 """like patch.iterhunks, but yield different events
1760 1761
1761 1762 - ('file', [header_lines + fromfile + tofile])
1762 1763 - ('context', [context_lines])
1763 1764 - ('hunk', [hunk_lines])
1764 1765 - ('range', (-start,len, +start,len, proc))
1765 1766 """
1766 1767 lines_re = re.compile(br'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
1767 1768 lr = linereader(fp)
1768 1769
1769 1770 def scanwhile(first, p):
1770 1771 """scan lr while predicate holds"""
1771 1772 lines = [first]
1772 1773 for line in iter(lr.readline, ''):
1773 1774 if p(line):
1774 1775 lines.append(line)
1775 1776 else:
1776 1777 lr.push(line)
1777 1778 break
1778 1779 return lines
1779 1780
1780 1781 for line in iter(lr.readline, ''):
1781 1782 if line.startswith('diff --git a/') or line.startswith('diff -r '):
1782 1783 def notheader(line):
1783 1784 s = line.split(None, 1)
1784 1785 return not s or s[0] not in ('---', 'diff')
1785 1786 header = scanwhile(line, notheader)
1786 1787 fromfile = lr.readline()
1787 1788 if fromfile.startswith('---'):
1788 1789 tofile = lr.readline()
1789 1790 header += [fromfile, tofile]
1790 1791 else:
1791 1792 lr.push(fromfile)
1792 1793 yield 'file', header
1793 1794 elif line[0:1] == ' ':
1794 1795 yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
1795 1796 elif line[0] in '-+':
1796 1797 yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
1797 1798 else:
1798 1799 m = lines_re.match(line)
1799 1800 if m:
1800 1801 yield 'range', m.groups()
1801 1802 else:
1802 1803 yield 'other', line
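# Illustrative example (annotation, not part of the module): feeding
# scanpatch() the small diff
#
#     diff --git a/f b/f
#     --- a/f
#     +++ b/f
#     @@ -1,2 +1,2 @@
#      ctx
#     -old
#     +new
#
# yields, in order: ('file', <the three header lines>),
# ('range', ('1', '2', '1', '2', '')), ('context', [' ctx\n']) and
# ('hunk', ['-old\n', '+new\n']).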
1803 1804
1804 1805 def scangitpatch(lr, firstline):
1805 1806 """
1806 1807 Git patches can emit:
1807 1808 - rename a to b
1808 1809 - change b
1809 1810 - copy a to c
1810 1811 - change c
1811 1812
1812 1813 We cannot apply this sequence as-is: the renamed 'a' could not be
1813 1814 found, as it would have been renamed already. And we cannot copy
1814 1815 from 'b' instead, because 'b' would have been changed already. So
1815 1816 we scan the git patch for copy and rename commands in order to
1816 1817 perform the copies ahead of time.
1817 1818 """
1818 1819 pos = 0
1819 1820 try:
1820 1821 pos = lr.fp.tell()
1821 1822 fp = lr.fp
1822 1823 except IOError:
1823 1824 fp = stringio(lr.fp.read())
1824 1825 gitlr = linereader(fp)
1825 1826 gitlr.push(firstline)
1826 1827 gitpatches = readgitpatch(gitlr)
1827 1828 fp.seek(pos)
1828 1829 return gitpatches
1829 1830
1830 1831 def iterhunks(fp):
1831 1832 """Read a patch and yield the following events:
1832 1833 - ("file", afile, bfile, firsthunk): select a new target file.
1833 1834 - ("hunk", hunk): a new hunk is ready to be applied, follows a
1834 1835 "file" event.
1835 1836 - ("git", gitchanges): current diff is in git format, gitchanges
1836 1837 maps filenames to gitpatch records. Unique event.
1837 1838 """
1838 1839 afile = ""
1839 1840 bfile = ""
1840 1841 state = None
1841 1842 hunknum = 0
1842 1843 emitfile = newfile = False
1843 1844 gitpatches = None
1844 1845
1845 1846 # our states
1846 1847 BFILE = 1
1847 1848 context = None
1848 1849 lr = linereader(fp)
1849 1850
1850 1851 for x in iter(lr.readline, ''):
1851 1852 if state == BFILE and (
1852 1853 (not context and x[0] == '@')
1853 1854 or (context is not False and x.startswith('***************'))
1854 1855 or x.startswith('GIT binary patch')):
1855 1856 gp = None
1856 1857 if (gitpatches and
1857 1858 gitpatches[-1].ispatching(afile, bfile)):
1858 1859 gp = gitpatches.pop()
1859 1860 if x.startswith('GIT binary patch'):
1860 1861 h = binhunk(lr, gp.path)
1861 1862 else:
1862 1863 if context is None and x.startswith('***************'):
1863 1864 context = True
1864 1865 h = hunk(x, hunknum + 1, lr, context)
1865 1866 hunknum += 1
1866 1867 if emitfile:
1867 1868 emitfile = False
1868 1869 yield 'file', (afile, bfile, h, gp and gp.copy() or None)
1869 1870 yield 'hunk', h
1870 1871 elif x.startswith('diff --git a/'):
1871 1872 m = gitre.match(x.rstrip(' \r\n'))
1872 1873 if not m:
1873 1874 continue
1874 1875 if gitpatches is None:
1875 1876 # scan whole input for git metadata
1876 1877 gitpatches = scangitpatch(lr, x)
1877 1878 yield 'git', [g.copy() for g in gitpatches
1878 1879 if g.op in ('COPY', 'RENAME')]
1879 1880 gitpatches.reverse()
1880 1881 afile = 'a/' + m.group(1)
1881 1882 bfile = 'b/' + m.group(2)
1882 1883 while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
1883 1884 gp = gitpatches.pop()
1884 1885 yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
1885 1886 if not gitpatches:
1886 1887 raise PatchError(_('failed to synchronize metadata for "%s"')
1887 1888 % afile[2:])
1888 1889 gp = gitpatches[-1]
1889 1890 newfile = True
1890 1891 elif x.startswith('---'):
1891 1892 # check for a unified diff
1892 1893 l2 = lr.readline()
1893 1894 if not l2.startswith('+++'):
1894 1895 lr.push(l2)
1895 1896 continue
1896 1897 newfile = True
1897 1898 context = False
1898 1899 afile = parsefilename(x)
1899 1900 bfile = parsefilename(l2)
1900 1901 elif x.startswith('***'):
1901 1902 # check for a context diff
1902 1903 l2 = lr.readline()
1903 1904 if not l2.startswith('---'):
1904 1905 lr.push(l2)
1905 1906 continue
1906 1907 l3 = lr.readline()
1907 1908 lr.push(l3)
1908 1909 if not l3.startswith("***************"):
1909 1910 lr.push(l2)
1910 1911 continue
1911 1912 newfile = True
1912 1913 context = True
1913 1914 afile = parsefilename(x)
1914 1915 bfile = parsefilename(l2)
1915 1916
1916 1917 if newfile:
1917 1918 newfile = False
1918 1919 emitfile = True
1919 1920 state = BFILE
1920 1921 hunknum = 0
1921 1922
1922 1923 while gitpatches:
1923 1924 gp = gitpatches.pop()
1924 1925 yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
1925 1926
1926 1927 def applybindelta(binchunk, data):
1927 1928 """Apply a binary delta hunk
1928 1929 The algorithm is the one used by git's patch-delta.c.
1929 1930 """
1930 1931 def deltahead(binchunk):
1931 1932 i = 0
1932 1933 for c in binchunk:
1933 1934 i += 1
1934 1935 if not (ord(c) & 0x80):
1935 1936 return i
1936 1937 return i
1937 1938 out = ""
1938 1939 s = deltahead(binchunk)
1939 1940 binchunk = binchunk[s:]
1940 1941 s = deltahead(binchunk)
1941 1942 binchunk = binchunk[s:]
1942 1943 i = 0
1943 1944 while i < len(binchunk):
1944 1945 cmd = ord(binchunk[i])
1945 1946 i += 1
1946 1947 if (cmd & 0x80):
1947 1948 offset = 0
1948 1949 size = 0
1949 1950 if (cmd & 0x01):
1950 1951 offset = ord(binchunk[i])
1951 1952 i += 1
1952 1953 if (cmd & 0x02):
1953 1954 offset |= ord(binchunk[i]) << 8
1954 1955 i += 1
1955 1956 if (cmd & 0x04):
1956 1957 offset |= ord(binchunk[i]) << 16
1957 1958 i += 1
1958 1959 if (cmd & 0x08):
1959 1960 offset |= ord(binchunk[i]) << 24
1960 1961 i += 1
1961 1962 if (cmd & 0x10):
1962 1963 size = ord(binchunk[i])
1963 1964 i += 1
1964 1965 if (cmd & 0x20):
1965 1966 size |= ord(binchunk[i]) << 8
1966 1967 i += 1
1967 1968 if (cmd & 0x40):
1968 1969 size |= ord(binchunk[i]) << 16
1969 1970 i += 1
1970 1971 if size == 0:
1971 1972 size = 0x10000
1972 1973 offset_end = offset + size
1973 1974 out += data[offset:offset_end]
1974 1975 elif cmd != 0:
1975 1976 offset_end = i + cmd
1976 1977 out += binchunk[i:offset_end]
1977 1978 i += cmd
1978 1979 else:
1979 1980 raise PatchError(_('unexpected delta opcode 0'))
1980 1981 return out
1981 1982
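As a concrete, hypothetical example of the delta format decoded above: the two leading size varints are skipped by deltahead(), a command byte without the high bit inserts that many literal bytes, and a command byte with the high bit set copies an (offset, size) slice out of the source data:
    # '\x0a\x07'     source size 10, target size 7 (both skipped)
    # '\x03abc'      insert the three literal bytes 'abc'
    # '\x91\x02\x04' 0x91 sets bits 0x01 (offset byte) and 0x10 (size byte),
    #                so copy 4 bytes of the source starting at offset 2
    delta = '\x0a\x07' + '\x03abc' + '\x91\x02\x04'
    applybindelta(delta, '0123456789')   # -> 'abc2345'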
1982 1983 def applydiff(ui, fp, backend, store, strip=1, prefix='', eolmode='strict'):
1983 1984 """Reads a patch from fp and tries to apply it.
1984 1985
1985 1986 Returns 0 for a clean patch, -1 if any rejects were found and 1 if
1986 1987 there was any fuzz.
1987 1988
1988 1989 If 'eolmode' is 'strict', the patch content and patched file are
1989 1990 read in binary mode. Otherwise, line endings are ignored when
1990 1991 patching then normalized according to 'eolmode'.
1991 1992 """
1992 1993 return _applydiff(ui, fp, patchfile, backend, store, strip=strip,
1993 1994 prefix=prefix, eolmode=eolmode)
1994 1995
1995 1996 def _canonprefix(repo, prefix):
1996 1997 if prefix:
1997 1998 prefix = pathutil.canonpath(repo.root, repo.getcwd(), prefix)
1998 1999 if prefix != '':
1999 2000 prefix += '/'
2000 2001 return prefix
2001 2002
2002 2003 def _applydiff(ui, fp, patcher, backend, store, strip=1, prefix='',
2003 2004 eolmode='strict'):
2004 2005 prefix = _canonprefix(backend.repo, prefix)
2005 2006 def pstrip(p):
2006 2007 return pathtransform(p, strip - 1, prefix)[1]
2007 2008
2008 2009 rejects = 0
2009 2010 err = 0
2010 2011 current_file = None
2011 2012
2012 2013 for state, values in iterhunks(fp):
2013 2014 if state == 'hunk':
2014 2015 if not current_file:
2015 2016 continue
2016 2017 ret = current_file.apply(values)
2017 2018 if ret > 0:
2018 2019 err = 1
2019 2020 elif state == 'file':
2020 2021 if current_file:
2021 2022 rejects += current_file.close()
2022 2023 current_file = None
2023 2024 afile, bfile, first_hunk, gp = values
2024 2025 if gp:
2025 2026 gp.path = pstrip(gp.path)
2026 2027 if gp.oldpath:
2027 2028 gp.oldpath = pstrip(gp.oldpath)
2028 2029 else:
2029 2030 gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
2030 2031 prefix)
2031 2032 if gp.op == 'RENAME':
2032 2033 backend.unlink(gp.oldpath)
2033 2034 if not first_hunk:
2034 2035 if gp.op == 'DELETE':
2035 2036 backend.unlink(gp.path)
2036 2037 continue
2037 2038 data, mode = None, None
2038 2039 if gp.op in ('RENAME', 'COPY'):
2039 2040 data, mode = store.getfile(gp.oldpath)[:2]
2040 2041 if data is None:
2041 2042 # This means that the old path does not exist
2042 2043 raise PatchError(_("source file '%s' does not exist")
2043 2044 % gp.oldpath)
2044 2045 if gp.mode:
2045 2046 mode = gp.mode
2046 2047 if gp.op == 'ADD':
2047 2048 # Added files without content have no hunk and
2048 2049 # must be created
2049 2050 data = ''
2050 2051 if data or mode:
2051 2052 if (gp.op in ('ADD', 'RENAME', 'COPY')
2052 2053 and backend.exists(gp.path)):
2053 2054 raise PatchError(_("cannot create %s: destination "
2054 2055 "already exists") % gp.path)
2055 2056 backend.setfile(gp.path, data, mode, gp.oldpath)
2056 2057 continue
2057 2058 try:
2058 2059 current_file = patcher(ui, gp, backend, store,
2059 2060 eolmode=eolmode)
2060 2061 except PatchError as inst:
2061 2062 ui.warn(str(inst) + '\n')
2062 2063 current_file = None
2063 2064 rejects += 1
2064 2065 continue
2065 2066 elif state == 'git':
2066 2067 for gp in values:
2067 2068 path = pstrip(gp.oldpath)
2068 2069 data, mode = backend.getfile(path)
2069 2070 if data is None:
2070 2071 # The error ignored here will trigger a getfile()
2071 2072 # error in a place more appropriate for error
2072 2073 # handling, and will not interrupt the patching
2073 2074 # process.
2074 2075 pass
2075 2076 else:
2076 2077 store.setfile(path, data, mode)
2077 2078 else:
2078 2079 raise error.Abort(_('unsupported parser state: %s') % state)
2079 2080
2080 2081 if current_file:
2081 2082 rejects += current_file.close()
2082 2083
2083 2084 if rejects:
2084 2085 return -1
2085 2086 return err
2086 2087
2087 2088 def _externalpatch(ui, repo, patcher, patchname, strip, files,
2088 2089 similarity):
2089 2090 """use <patcher> to apply <patchname> to the working directory.
2090 2091 returns whether patch was applied with fuzz factor."""
2091 2092
2092 2093 fuzz = False
2093 2094 args = []
2094 2095 cwd = repo.root
2095 2096 if cwd:
2096 2097 args.append('-d %s' % util.shellquote(cwd))
2097 2098 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
2098 2099 util.shellquote(patchname)))
2099 2100 try:
2100 2101 for line in util.iterfile(fp):
2101 2102 line = line.rstrip()
2102 2103 ui.note(line + '\n')
2103 2104 if line.startswith('patching file '):
2104 2105 pf = util.parsepatchoutput(line)
2105 2106 printed_file = False
2106 2107 files.add(pf)
2107 2108 elif line.find('with fuzz') >= 0:
2108 2109 fuzz = True
2109 2110 if not printed_file:
2110 2111 ui.warn(pf + '\n')
2111 2112 printed_file = True
2112 2113 ui.warn(line + '\n')
2113 2114 elif line.find('saving rejects to file') >= 0:
2114 2115 ui.warn(line + '\n')
2115 2116 elif line.find('FAILED') >= 0:
2116 2117 if not printed_file:
2117 2118 ui.warn(pf + '\n')
2118 2119 printed_file = True
2119 2120 ui.warn(line + '\n')
2120 2121 finally:
2121 2122 if files:
2122 2123 scmutil.marktouched(repo, files, similarity)
2123 2124 code = fp.close()
2124 2125 if code:
2125 2126 raise PatchError(_("patch command failed: %s") %
2126 2127 util.explainexit(code)[0])
2127 2128 return fuzz
2128 2129
2129 2130 def patchbackend(ui, backend, patchobj, strip, prefix, files=None,
2130 2131 eolmode='strict'):
2131 2132 if files is None:
2132 2133 files = set()
2133 2134 if eolmode is None:
2134 2135 eolmode = ui.config('patch', 'eol')
2135 2136 if eolmode.lower() not in eolmodes:
2136 2137 raise error.Abort(_('unsupported line endings type: %s') % eolmode)
2137 2138 eolmode = eolmode.lower()
2138 2139
2139 2140 store = filestore()
2140 2141 try:
2141 2142 fp = open(patchobj, 'rb')
2142 2143 except TypeError:
2143 2144 fp = patchobj
2144 2145 try:
2145 2146 ret = applydiff(ui, fp, backend, store, strip=strip, prefix=prefix,
2146 2147 eolmode=eolmode)
2147 2148 finally:
2148 2149 if fp != patchobj:
2149 2150 fp.close()
2150 2151 files.update(backend.close())
2151 2152 store.close()
2152 2153 if ret < 0:
2153 2154 raise PatchError(_('patch failed to apply'))
2154 2155 return ret > 0
2155 2156
2156 2157 def internalpatch(ui, repo, patchobj, strip, prefix='', files=None,
2157 2158 eolmode='strict', similarity=0):
2158 2159 """use builtin patch to apply <patchobj> to the working directory.
2159 2160 returns whether patch was applied with fuzz factor."""
2160 2161 backend = workingbackend(ui, repo, similarity)
2161 2162 return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
2162 2163
2163 2164 def patchrepo(ui, repo, ctx, store, patchobj, strip, prefix, files=None,
2164 2165 eolmode='strict'):
2165 2166 backend = repobackend(ui, repo, ctx, store)
2166 2167 return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
2167 2168
2168 2169 def patch(ui, repo, patchname, strip=1, prefix='', files=None, eolmode='strict',
2169 2170 similarity=0):
2170 2171 """Apply <patchname> to the working directory.
2171 2172
2172 2173 'eolmode' specifies how end of lines should be handled. It can be:
2173 2174 - 'strict': inputs are read in binary mode, EOLs are preserved
2174 2175 - 'crlf': EOLs are ignored when patching and reset to CRLF
2175 2176 - 'lf': EOLs are ignored when patching and reset to LF
2176 2177 - None: get it from user settings, default to 'strict'
2177 2178 'eolmode' is ignored when using an external patcher program.
2178 2179
2179 2180 Returns whether patch was applied with fuzz factor.
2180 2181 """
2181 2182 patcher = ui.config('ui', 'patch')
2182 2183 if files is None:
2183 2184 files = set()
2184 2185 if patcher:
2185 2186 return _externalpatch(ui, repo, patcher, patchname, strip,
2186 2187 files, similarity)
2187 2188 return internalpatch(ui, repo, patchname, strip, prefix, files, eolmode,
2188 2189 similarity)
2189 2190
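A hedged usage sketch of the patch() entry point above; the ui/repo objects and the 'fix.patch' path are assumed for illustration:
    # apply the patch at the repository root, normalizing line endings to LF
    fuzz = patch(ui, repo, 'fix.patch', strip=1, eolmode='lf')
    if fuzz:
        ui.warn('patch applied with fuzz\n')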
2190 2191 def changedfiles(ui, repo, patchpath, strip=1, prefix=''):
2191 2192 backend = fsbackend(ui, repo.root)
2192 2193 prefix = _canonprefix(repo, prefix)
2193 2194 with open(patchpath, 'rb') as fp:
2194 2195 changed = set()
2195 2196 for state, values in iterhunks(fp):
2196 2197 if state == 'file':
2197 2198 afile, bfile, first_hunk, gp = values
2198 2199 if gp:
2199 2200 gp.path = pathtransform(gp.path, strip - 1, prefix)[1]
2200 2201 if gp.oldpath:
2201 2202 gp.oldpath = pathtransform(gp.oldpath, strip - 1,
2202 2203 prefix)[1]
2203 2204 else:
2204 2205 gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
2205 2206 prefix)
2206 2207 changed.add(gp.path)
2207 2208 if gp.op == 'RENAME':
2208 2209 changed.add(gp.oldpath)
2209 2210 elif state not in ('hunk', 'git'):
2210 2211 raise error.Abort(_('unsupported parser state: %s') % state)
2211 2212 return changed
2212 2213
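A hypothetical use of changedfiles(), for instance to report what a patch would touch before applying it ('fix.patch' is a made-up path):
    files = changedfiles(ui, repo, 'fix.patch')
    ui.status('patch touches %d file(s)\n' % len(files))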
2213 2214 class GitDiffRequired(Exception):
2214 2215 pass
2215 2216
2216 2217 def diffallopts(ui, opts=None, untrusted=False, section='diff'):
2217 2218 '''return diffopts with all features supported and parsed'''
2218 2219 return difffeatureopts(ui, opts=opts, untrusted=untrusted, section=section,
2219 2220 git=True, whitespace=True, formatchanging=True)
2220 2221
2221 2222 diffopts = diffallopts
2222 2223
2223 2224 def difffeatureopts(ui, opts=None, untrusted=False, section='diff', git=False,
2224 2225 whitespace=False, formatchanging=False):
2225 2226 '''return diffopts with only opted-in features parsed
2226 2227
2227 2228 Features:
2228 2229 - git: git-style diffs
2229 2230 - whitespace: whitespace options like ignoreblanklines and ignorews
2230 2231 - formatchanging: options that will likely break or cause correctness issues
2231 2232 with most diff parsers
2232 2233 '''
2233 2234 def get(key, name=None, getter=ui.configbool, forceplain=None):
2234 2235 if opts:
2235 2236 v = opts.get(key)
2236 2237 # diffopts flags are either None-default (which is passed
2237 2238 # through unchanged, so we can identify unset values), or
2238 2239 # some other falsey default (eg --unified, which defaults
2239 2240 # to an empty string). We only want to override the config
2240 2241 # entries from hgrc with command line values if they
2241 2242 # appear to have been set, which is any truthy value,
2242 2243 # True, or False.
2243 2244 if v or isinstance(v, bool):
2244 2245 return v
2245 2246 if forceplain is not None and ui.plain():
2246 2247 return forceplain
2247 2248 return getter(section, name or key, untrusted=untrusted)
2248 2249
2249 2250 # core options, expected to be understood by every diff parser
2250 2251 buildopts = {
2251 2252 'nodates': get('nodates'),
2252 2253 'showfunc': get('show_function', 'showfunc'),
2253 2254 'context': get('unified', getter=ui.config),
2254 2255 }
2256 buildopts['worddiff'] = ui.configbool('experimental', 'worddiff')
2255 2257
2256 2258 if git:
2257 2259 buildopts['git'] = get('git')
2258 2260
2259 2261 # since this is in the experimental section, we need to call
2260 2262 # ui.configbool directly
2261 2263 buildopts['showsimilarity'] = ui.configbool('experimental',
2262 2264 'extendedheader.similarity')
2263 2265
2264 2266 # need to inspect the ui object instead of using get() since we want to
2265 2267 # test for an int
2266 2268 hconf = ui.config('experimental', 'extendedheader.index')
2267 2269 if hconf is not None:
2268 2270 hlen = None
2269 2271 try:
2270 2272 # the hash config could be an integer (for length of hash) or a
2271 2273 # word (e.g. short, full, none)
2272 2274 hlen = int(hconf)
2273 2275 if hlen < 0 or hlen > 40:
2274 2276 msg = _("invalid length for extendedheader.index: '%d'\n")
2275 2277 ui.warn(msg % hlen)
2276 2278 except ValueError:
2277 2279 # default value
2278 2280 if hconf == 'short' or hconf == '':
2279 2281 hlen = 12
2280 2282 elif hconf == 'full':
2281 2283 hlen = 40
2282 2284 elif hconf != 'none':
2283 2285 msg = _("invalid value for extendedheader.index: '%s'\n")
2284 2286 ui.warn(msg % hconf)
2285 2287 finally:
2286 2288 buildopts['index'] = hlen
2287 2289
2288 2290 if whitespace:
2289 2291 buildopts['ignorews'] = get('ignore_all_space', 'ignorews')
2290 2292 buildopts['ignorewsamount'] = get('ignore_space_change',
2291 2293 'ignorewsamount')
2292 2294 buildopts['ignoreblanklines'] = get('ignore_blank_lines',
2293 2295 'ignoreblanklines')
2294 2296 buildopts['ignorewseol'] = get('ignore_space_at_eol', 'ignorewseol')
2295 2297 if formatchanging:
2296 2298 buildopts['text'] = opts and opts.get('text')
2297 2299 binary = None if opts is None else opts.get('binary')
2298 2300 buildopts['nobinary'] = (not binary if binary is not None
2299 2301 else get('nobinary', forceplain=False))
2300 2302 buildopts['noprefix'] = get('noprefix', forceplain=False)
2301 2303
2302 2304 return mdiff.diffopts(**pycompat.strkwargs(buildopts))
2303 2305
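Two hedged call sites for the helpers above, assuming a ui object and a parsed command-line opts dict; note that the new worddiff flag is not a command-line option but is picked up from the experimental.worddiff config by difffeatureopts():
    dopts = diffallopts(ui, opts)                # every feature, as 'hg diff' does
    dopts = difffeatureopts(ui, opts, git=True)  # opt in to git diffs only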
2304 2306 def diff(repo, node1=None, node2=None, match=None, changes=None,
2305 2307 opts=None, losedatafn=None, prefix='', relroot='', copy=None,
2306 2308 hunksfilterfn=None):
2307 2309 '''yields diff of changes to files between two nodes, or node and
2308 2310 working directory.
2309 2311
2310 2312 if node1 is None, use first dirstate parent instead.
2311 2313 if node2 is None, compare node1 with working directory.
2312 2314
2313 2315 losedatafn(**kwarg) is a callable run when opts.upgrade=True and
2314 2316 every time some change cannot be represented with the current
2315 2317 patch format. Return False to upgrade to git patch format, True to
2316 2318 accept the loss or raise an exception to abort the diff. It is
2317 2319 called with the name of current file being diffed as 'fn'. If set
2318 2320 to None, patches will always be upgraded to git format when
2319 2321 necessary.
2320 2322
2321 2323 prefix is a filename prefix that is prepended to all filenames on
2322 2324 display (used for subrepos).
2323 2325
2324 2326 relroot, if not empty, must be normalized with a trailing /. Any match
2325 2327 patterns that fall outside it will be ignored.
2326 2328
2327 2329 copy, if not empty, should contain mappings {dst@y: src@x} of copy
2328 2330 information.
2329 2331
2330 2332 hunksfilterfn, if not None, should be a function taking a filectx and
2331 2333 hunks generator that may yield filtered hunks.
2332 2334 '''
2333 2335 for fctx1, fctx2, hdr, hunks in diffhunks(
2334 2336 repo, node1=node1, node2=node2,
2335 2337 match=match, changes=changes, opts=opts,
2336 2338 losedatafn=losedatafn, prefix=prefix, relroot=relroot, copy=copy,
2337 2339 ):
2338 2340 if hunksfilterfn is not None:
2339 2341 # If the file has been removed, fctx2 is None; but this should
2340 2342 # not occur here since we catch removed files early in
2341 2343 # cmdutil.getloglinerangerevs() for 'hg log -L'.
2342 2344 assert fctx2 is not None, \
2343 2345 'fctx2 unexpectedly None in diff hunks filtering'
2344 2346 hunks = hunksfilterfn(fctx2, hunks)
2345 2347 text = ''.join(sum((list(hlines) for hrange, hlines in hunks), []))
2346 2348 if hdr and (text or len(hdr) > 1):
2347 2349 yield '\n'.join(hdr) + '\n'
2348 2350 if text:
2349 2351 yield text
2350 2352
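A minimal, hypothetical caller of diff(), writing the generated blocks for the working directory against its first parent:
    dopts = diffallopts(ui, {'git': True})
    for block in diff(repo, opts=dopts):
        ui.write(block)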
2351 2353 def diffhunks(repo, node1=None, node2=None, match=None, changes=None,
2352 2354 opts=None, losedatafn=None, prefix='', relroot='', copy=None):
2353 2355 """Yield diff of changes to files in the form of (`header`, `hunks`) tuples
2354 2356 where `header` is a list of diff headers and `hunks` is an iterable of
2355 2357 (`hunkrange`, `hunklines`) tuples.
2356 2358
2357 2359 See diff() for the meaning of parameters.
2358 2360 """
2359 2361
2360 2362 if opts is None:
2361 2363 opts = mdiff.defaultopts
2362 2364
2363 2365 if not node1 and not node2:
2364 2366 node1 = repo.dirstate.p1()
2365 2367
2366 2368 def lrugetfilectx():
2367 2369 cache = {}
2368 2370 order = collections.deque()
2369 2371 def getfilectx(f, ctx):
2370 2372 fctx = ctx.filectx(f, filelog=cache.get(f))
2371 2373 if f not in cache:
2372 2374 if len(cache) > 20:
2373 2375 del cache[order.popleft()]
2374 2376 cache[f] = fctx.filelog()
2375 2377 else:
2376 2378 order.remove(f)
2377 2379 order.append(f)
2378 2380 return fctx
2379 2381 return getfilectx
2380 2382 getfilectx = lrugetfilectx()
2381 2383
2382 2384 ctx1 = repo[node1]
2383 2385 ctx2 = repo[node2]
2384 2386
2385 2387 relfiltered = False
2386 2388 if relroot != '' and match.always():
2387 2389 # as a special case, create a new matcher with just the relroot
2388 2390 pats = [relroot]
2389 2391 match = scmutil.match(ctx2, pats, default='path')
2390 2392 relfiltered = True
2391 2393
2392 2394 if not changes:
2393 2395 changes = repo.status(ctx1, ctx2, match=match)
2394 2396 modified, added, removed = changes[:3]
2395 2397
2396 2398 if not modified and not added and not removed:
2397 2399 return []
2398 2400
2399 2401 if repo.ui.debugflag:
2400 2402 hexfunc = hex
2401 2403 else:
2402 2404 hexfunc = short
2403 2405 revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]
2404 2406
2405 2407 if copy is None:
2406 2408 copy = {}
2407 2409 if opts.git or opts.upgrade:
2408 2410 copy = copies.pathcopies(ctx1, ctx2, match=match)
2409 2411
2410 2412 if relroot is not None:
2411 2413 if not relfiltered:
2412 2414 # XXX this would ideally be done in the matcher, but that is
2413 2415 # generally meant to 'or' patterns, not 'and' them. In this case we
2414 2416 # need to 'and' all the patterns from the matcher with relroot.
2415 2417 def filterrel(l):
2416 2418 return [f for f in l if f.startswith(relroot)]
2417 2419 modified = filterrel(modified)
2418 2420 added = filterrel(added)
2419 2421 removed = filterrel(removed)
2420 2422 relfiltered = True
2421 2423 # filter out copies where either side isn't inside the relative root
2422 2424 copy = dict(((dst, src) for (dst, src) in copy.iteritems()
2423 2425 if dst.startswith(relroot)
2424 2426 and src.startswith(relroot)))
2425 2427
2426 2428 modifiedset = set(modified)
2427 2429 addedset = set(added)
2428 2430 removedset = set(removed)
2429 2431 for f in modified:
2430 2432 if f not in ctx1:
2431 2433 # Fix up added, since merged-in additions appear as
2432 2434 # modifications during merges
2433 2435 modifiedset.remove(f)
2434 2436 addedset.add(f)
2435 2437 for f in removed:
2436 2438 if f not in ctx1:
2437 2439 # Merged-in additions that are then removed are reported as removed.
2438 2440 # They are not in ctx1, so we don't want to show them in the diff.
2439 2441 removedset.remove(f)
2440 2442 modified = sorted(modifiedset)
2441 2443 added = sorted(addedset)
2442 2444 removed = sorted(removedset)
2443 2445 for dst, src in copy.items():
2444 2446 if src not in ctx1:
2445 2447 # Files merged in during a merge and then copied/renamed are
2446 2448 # reported as copies. We want to show them in the diff as additions.
2447 2449 del copy[dst]
2448 2450
2449 2451 def difffn(opts, losedata):
2450 2452 return trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
2451 2453 copy, getfilectx, opts, losedata, prefix, relroot)
2452 2454 if opts.upgrade and not opts.git:
2453 2455 try:
2454 2456 def losedata(fn):
2455 2457 if not losedatafn or not losedatafn(fn=fn):
2456 2458 raise GitDiffRequired
2457 2459 # Buffer the whole output until we are sure it can be generated
2458 2460 return list(difffn(opts.copy(git=False), losedata))
2459 2461 except GitDiffRequired:
2460 2462 return difffn(opts.copy(git=True), None)
2461 2463 else:
2462 2464 return difffn(opts, None)
2463 2465
2464 2466 def difflabel(func, *args, **kw):
2465 2467 '''yields 2-tuples of (output, label) based on the output of func()'''
2468 inlinecolor = False
2469 if kw.get('opts'):
2470 inlinecolor = kw['opts'].worddiff
2466 2471 headprefixes = [('diff', 'diff.diffline'),
2467 2472 ('copy', 'diff.extended'),
2468 2473 ('rename', 'diff.extended'),
2469 2474 ('old', 'diff.extended'),
2470 2475 ('new', 'diff.extended'),
2471 2476 ('deleted', 'diff.extended'),
2472 2477 ('index', 'diff.extended'),
2473 2478 ('similarity', 'diff.extended'),
2474 2479 ('---', 'diff.file_a'),
2475 2480 ('+++', 'diff.file_b')]
2476 2481 textprefixes = [('@', 'diff.hunk'),
2477 2482 ('-', 'diff.deleted'),
2478 2483 ('+', 'diff.inserted')]
2479 2484 head = False
2480 2485 for chunk in func(*args, **kw):
2481 2486 lines = chunk.split('\n')
2487 matches = {}
2488 if inlinecolor:
2489 matches = _findmatches(lines)
2482 2490 for i, line in enumerate(lines):
2483 2491 if i != 0:
2484 2492 yield ('\n', '')
2485 2493 if head:
2486 2494 if line.startswith('@'):
2487 2495 head = False
2488 2496 else:
2489 2497 if line and line[0] not in ' +-@\\':
2490 2498 head = True
2491 2499 stripline = line
2492 2500 diffline = False
2493 2501 if not head and line and line[0] in '+-':
2494 2502 # highlight tabs and trailing whitespace, but only in
2495 2503 # changed lines
2496 2504 stripline = line.rstrip()
2497 2505 diffline = True
2498 2506
2499 2507 prefixes = textprefixes
2500 2508 if head:
2501 2509 prefixes = headprefixes
2502 2510 for prefix, label in prefixes:
2503 2511 if stripline.startswith(prefix):
2504 2512 if diffline:
2505 2513 for token in tabsplitter.findall(stripline):
2506 2514 if '\t' == token[0]:
2507 2515 yield (token, 'diff.tab')
2508 2516 else:
2509 yield (token, label)
2517 if i in matches:
2518 for l, t in _inlinediff(
2519 lines[i].rstrip(),
2520 lines[matches[i]].rstrip(),
2521 label):
2522 yield (t, l)
2523 else:
2524 yield (token, label)
2510 2525 else:
2511 2526 yield (stripline, label)
2512 2527 break
2513 2528 else:
2514 2529 yield (line, '')
2515 2530 if line != stripline:
2516 2531 yield (line[len(stripline):], 'diff.trailingwhitespace')
2517 2532
2533 def _findmatches(slist):
2534 '''Look for insertions matching deletions and return a dict of
2535 correspondences.
2536 '''
2537 lastmatch = 0
2538 matches = {}
2539 for i, line in enumerate(slist):
2540 if line == '':
2541 continue
2542 if line[0] == '-':
2543 lastmatch = max(lastmatch, i)
2544 newgroup = False
2545 for j, newline in enumerate(slist[lastmatch + 1:]):
2546 if newline == '':
2547 continue
2548 if newline[0] == '-' and newgroup: # too far, no match
2549 break
2550 if newline[0] == '+': # potential match
2551 newgroup = True
2552 sim = difflib.SequenceMatcher(None, line, newline).ratio()
2553 if sim > 0.7:
2554 lastmatch = lastmatch + 1 + j
2555 matches[i] = lastmatch
2556 matches[lastmatch] = i
2557 break
2558 return matches
2559
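As a hedged illustration of the pairing above (the 0.7 similarity cutoff is the one hard-coded in this change), two lines that differ by a single word are matched both ways:
    lines = ['-this is a line of text',
             '+this is a wine of text']
    _findmatches(lines)   # -> {0: 1, 1: 0}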
2560 def _inlinediff(s1, s2, operation):
2561 '''Perform string diff to highlight specific changes.'''
2562 operation_skip = '+?' if operation == 'diff.deleted' else '-?'
2563 if operation == 'diff.deleted':
2564 s2, s1 = s1, s2
2565
2566 buff = []
2567 # we never want to highlight the leading +-
2568 if operation == 'diff.deleted' and s2.startswith('-'):
2569 label = operation
2570 token = '-'
2571 s2 = s2[1:]
2572 s1 = s1[1:]
2573 elif operation == 'diff.inserted' and s1.startswith('+'):
2574 label = operation
2575 token = '+'
2576 s2 = s2[1:]
2577 s1 = s1[1:]
2578
2579 s = difflib.ndiff(re.split(br'(\W)', s2), re.split(br'(\W)', s1))
2580 for part in s:
2581 if part[0] in operation_skip:
2582 continue
2583 l = operation + '.highlight'
2584 if part[0] in ' ':
2585 l = operation
2586 if l == label: # contiguous token with same label
2587 token += part[2:]
2588 continue
2589 else:
2590 buff.append((label, token))
2591 label = l
2592 token = part[2:]
2593 buff.append((label, token))
2594
2595 return buff
2596
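For instance, consistent with the expected output of the new test added below, diffing a deleted line against its inserted counterpart returns (label, token) pairs in which only the changed word carries the '.highlight' label:
    _inlinediff('-be changed into three!', '+be changed into four!',
                'diff.deleted')
    # -> [('diff.deleted', '-be changed into '),
    #     ('diff.deleted.highlight', 'three'),
    #     ('diff.deleted', '!')]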
2518 2597 def diffui(*args, **kw):
2519 2598 '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
2520 2599 return difflabel(diff, *args, **kw)
2521 2600
2522 2601 def _filepairs(modified, added, removed, copy, opts):
2523 2602 '''generates tuples (f1, f2, copyop), where f1 is the name of the file
2524 2603 before and f2 is the name after. For added files, f1 will be None,
2525 2604 and for removed files, f2 will be None. copyop may be set to None, 'copy'
2526 2605 or 'rename' (the latter two only if opts.git is set).'''
2527 2606 gone = set()
2528 2607
2529 2608 copyto = dict([(v, k) for k, v in copy.items()])
2530 2609
2531 2610 addedset, removedset = set(added), set(removed)
2532 2611
2533 2612 for f in sorted(modified + added + removed):
2534 2613 copyop = None
2535 2614 f1, f2 = f, f
2536 2615 if f in addedset:
2537 2616 f1 = None
2538 2617 if f in copy:
2539 2618 if opts.git:
2540 2619 f1 = copy[f]
2541 2620 if f1 in removedset and f1 not in gone:
2542 2621 copyop = 'rename'
2543 2622 gone.add(f1)
2544 2623 else:
2545 2624 copyop = 'copy'
2546 2625 elif f in removedset:
2547 2626 f2 = None
2548 2627 if opts.git:
2549 2628 # have we already reported a copy above?
2550 2629 if (f in copyto and copyto[f] in addedset
2551 2630 and copy[copyto[f]] == f):
2552 2631 continue
2553 2632 yield f1, f2, copyop
2554 2633
2555 2634 def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
2556 2635 copy, getfilectx, opts, losedatafn, prefix, relroot):
2557 2636 '''given input data, generate a diff and yield it in blocks
2558 2637
2559 2638 If generating a diff would lose data like flags or binary data and
2560 2639 losedatafn is not None, it will be called.
2561 2640
2562 2641 relroot is removed and prefix is added to every path in the diff output.
2563 2642
2564 2643 If relroot is not empty, this function expects every path in modified,
2565 2644 added, removed and copy to start with it.'''
2566 2645
2567 2646 def gitindex(text):
2568 2647 if not text:
2569 2648 text = ""
2570 2649 l = len(text)
2571 2650 s = hashlib.sha1('blob %d\0' % l)
2572 2651 s.update(text)
2573 2652 return s.hexdigest()
2574 2653
2575 2654 if opts.noprefix:
2576 2655 aprefix = bprefix = ''
2577 2656 else:
2578 2657 aprefix = 'a/'
2579 2658 bprefix = 'b/'
2580 2659
2581 2660 def diffline(f, revs):
2582 2661 revinfo = ' '.join(["-r %s" % rev for rev in revs])
2583 2662 return 'diff %s %s' % (revinfo, f)
2584 2663
2585 2664 def isempty(fctx):
2586 2665 return fctx is None or fctx.size() == 0
2587 2666
2588 2667 date1 = util.datestr(ctx1.date())
2589 2668 date2 = util.datestr(ctx2.date())
2590 2669
2591 2670 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
2592 2671
2593 2672 if relroot != '' and (repo.ui.configbool('devel', 'all-warnings')
2594 2673 or repo.ui.configbool('devel', 'check-relroot')):
2595 2674 for f in modified + added + removed + list(copy) + list(copy.values()):
2596 2675 if f is not None and not f.startswith(relroot):
2597 2676 raise AssertionError(
2598 2677 "file %s doesn't start with relroot %s" % (f, relroot))
2599 2678
2600 2679 for f1, f2, copyop in _filepairs(modified, added, removed, copy, opts):
2601 2680 content1 = None
2602 2681 content2 = None
2603 2682 fctx1 = None
2604 2683 fctx2 = None
2605 2684 flag1 = None
2606 2685 flag2 = None
2607 2686 if f1:
2608 2687 fctx1 = getfilectx(f1, ctx1)
2609 2688 if opts.git or losedatafn:
2610 2689 flag1 = ctx1.flags(f1)
2611 2690 if f2:
2612 2691 fctx2 = getfilectx(f2, ctx2)
2613 2692 if opts.git or losedatafn:
2614 2693 flag2 = ctx2.flags(f2)
2615 2694 # if binary is True, output "summary" or "base85", but not "text diff"
2616 2695 binary = not opts.text and any(f.isbinary()
2617 2696 for f in [fctx1, fctx2] if f is not None)
2618 2697
2619 2698 if losedatafn and not opts.git:
2620 2699 if (binary or
2621 2700 # copy/rename
2622 2701 f2 in copy or
2623 2702 # empty file creation
2624 2703 (not f1 and isempty(fctx2)) or
2625 2704 # empty file deletion
2626 2705 (isempty(fctx1) and not f2) or
2627 2706 # create with flags
2628 2707 (not f1 and flag2) or
2629 2708 # change flags
2630 2709 (f1 and f2 and flag1 != flag2)):
2631 2710 losedatafn(f2 or f1)
2632 2711
2633 2712 path1 = f1 or f2
2634 2713 path2 = f2 or f1
2635 2714 path1 = posixpath.join(prefix, path1[len(relroot):])
2636 2715 path2 = posixpath.join(prefix, path2[len(relroot):])
2637 2716 header = []
2638 2717 if opts.git:
2639 2718 header.append('diff --git %s%s %s%s' %
2640 2719 (aprefix, path1, bprefix, path2))
2641 2720 if not f1: # added
2642 2721 header.append('new file mode %s' % gitmode[flag2])
2643 2722 elif not f2: # removed
2644 2723 header.append('deleted file mode %s' % gitmode[flag1])
2645 2724 else: # modified/copied/renamed
2646 2725 mode1, mode2 = gitmode[flag1], gitmode[flag2]
2647 2726 if mode1 != mode2:
2648 2727 header.append('old mode %s' % mode1)
2649 2728 header.append('new mode %s' % mode2)
2650 2729 if copyop is not None:
2651 2730 if opts.showsimilarity:
2652 2731 sim = similar.score(ctx1[path1], ctx2[path2]) * 100
2653 2732 header.append('similarity index %d%%' % sim)
2654 2733 header.append('%s from %s' % (copyop, path1))
2655 2734 header.append('%s to %s' % (copyop, path2))
2656 2735 elif revs and not repo.ui.quiet:
2657 2736 header.append(diffline(path1, revs))
2658 2737
2659 2738 # fctx.is | diffopts | what to | is fctx.data()
2660 2739 # binary() | text nobinary git index | output? | outputted?
2661 2740 # ------------------------------------|----------------------------
2662 2741 # yes | no no no * | summary | no
2663 2742 # yes | no no yes * | base85 | yes
2664 2743 # yes | no yes no * | summary | no
2665 2744 # yes | no yes yes 0 | summary | no
2666 2745 # yes | no yes yes >0 | summary | semi [1]
2667 2746 # yes | yes * * * | text diff | yes
2668 2747 # no | * * * * | text diff | yes
2669 2748 # [1]: hash(fctx.data()) is outputted. so fctx.data() cannot be faked
2670 2749 if binary and (not opts.git or (opts.git and opts.nobinary and not
2671 2750 opts.index)):
2672 2751 # fast path: no binary content will be displayed, content1 and
2673 2752 # content2 are only used for equivalent test. cmp() could have a
2674 2753 # fast path.
2675 2754 if fctx1 is not None:
2676 2755 content1 = b'\0'
2677 2756 if fctx2 is not None:
2678 2757 if fctx1 is not None and not fctx1.cmp(fctx2):
2679 2758 content2 = b'\0' # not different
2680 2759 else:
2681 2760 content2 = b'\0\0'
2682 2761 else:
2683 2762 # normal path: load contents
2684 2763 if fctx1 is not None:
2685 2764 content1 = fctx1.data()
2686 2765 if fctx2 is not None:
2687 2766 content2 = fctx2.data()
2688 2767
2689 2768 if binary and opts.git and not opts.nobinary:
2690 2769 text = mdiff.b85diff(content1, content2)
2691 2770 if text:
2692 2771 header.append('index %s..%s' %
2693 2772 (gitindex(content1), gitindex(content2)))
2694 2773 hunks = (None, [text]),
2695 2774 else:
2696 2775 if opts.git and opts.index > 0:
2697 2776 flag = flag1
2698 2777 if flag is None:
2699 2778 flag = flag2
2700 2779 header.append('index %s..%s %s' %
2701 2780 (gitindex(content1)[0:opts.index],
2702 2781 gitindex(content2)[0:opts.index],
2703 2782 gitmode[flag]))
2704 2783
2705 2784 uheaders, hunks = mdiff.unidiff(content1, date1,
2706 2785 content2, date2,
2707 2786 path1, path2, opts=opts)
2708 2787 header.extend(uheaders)
2709 2788 yield fctx1, fctx2, header, hunks
2710 2789
2711 2790 def diffstatsum(stats):
2712 2791 maxfile, maxtotal, addtotal, removetotal, binary = 0, 0, 0, 0, False
2713 2792 for f, a, r, b in stats:
2714 2793 maxfile = max(maxfile, encoding.colwidth(f))
2715 2794 maxtotal = max(maxtotal, a + r)
2716 2795 addtotal += a
2717 2796 removetotal += r
2718 2797 binary = binary or b
2719 2798
2720 2799 return maxfile, maxtotal, addtotal, removetotal, binary
2721 2800
2722 2801 def diffstatdata(lines):
2723 2802 diffre = re.compile('^diff .*-r [a-z0-9]+\s(.*)$')
2724 2803
2725 2804 results = []
2726 2805 filename, adds, removes, isbinary = None, 0, 0, False
2727 2806
2728 2807 def addresult():
2729 2808 if filename:
2730 2809 results.append((filename, adds, removes, isbinary))
2731 2810
2732 2811 # inheader is used to track if a line is in the
2733 2812 # header portion of the diff. This helps properly account
2734 2813 # for lines that start with '--' or '++'
2735 2814 inheader = False
2736 2815
2737 2816 for line in lines:
2738 2817 if line.startswith('diff'):
2739 2818 addresult()
2740 2819 # starting a new file diff
2741 2820 # set numbers to 0 and reset inheader
2742 2821 inheader = True
2743 2822 adds, removes, isbinary = 0, 0, False
2744 2823 if line.startswith('diff --git a/'):
2745 2824 filename = gitre.search(line).group(2)
2746 2825 elif line.startswith('diff -r'):
2747 2826 # format: "diff -r ... -r ... filename"
2748 2827 filename = diffre.search(line).group(1)
2749 2828 elif line.startswith('@@'):
2750 2829 inheader = False
2751 2830 elif line.startswith('+') and not inheader:
2752 2831 adds += 1
2753 2832 elif line.startswith('-') and not inheader:
2754 2833 removes += 1
2755 2834 elif (line.startswith('GIT binary patch') or
2756 2835 line.startswith('Binary file')):
2757 2836 isbinary = True
2758 2837 addresult()
2759 2838 return results
2760 2839
2761 2840 def diffstat(lines, width=80):
2762 2841 output = []
2763 2842 stats = diffstatdata(lines)
2764 2843 maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats)
2765 2844
2766 2845 countwidth = len(str(maxtotal))
2767 2846 if hasbinary and countwidth < 3:
2768 2847 countwidth = 3
2769 2848 graphwidth = width - countwidth - maxname - 6
2770 2849 if graphwidth < 10:
2771 2850 graphwidth = 10
2772 2851
2773 2852 def scale(i):
2774 2853 if maxtotal <= graphwidth:
2775 2854 return i
2776 2855 # If diffstat runs out of room it doesn't print anything,
2777 2856 # which isn't very useful, so always print at least one + or -
2778 2857 # if there were at least some changes.
2779 2858 return max(i * graphwidth // maxtotal, int(bool(i)))
2780 2859
2781 2860 for filename, adds, removes, isbinary in stats:
2782 2861 if isbinary:
2783 2862 count = 'Bin'
2784 2863 else:
2785 2864 count = '%d' % (adds + removes)
2786 2865 pluses = '+' * scale(adds)
2787 2866 minuses = '-' * scale(removes)
2788 2867 output.append(' %s%s | %*s %s%s\n' %
2789 2868 (filename, ' ' * (maxname - encoding.colwidth(filename)),
2790 2869 countwidth, count, pluses, minuses))
2791 2870
2792 2871 if stats:
2793 2872 output.append(_(' %d files changed, %d insertions(+), '
2794 2873 '%d deletions(-)\n')
2795 2874 % (len(stats), totaladds, totalremoves))
2796 2875
2797 2876 return ''.join(output)
2798 2877
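A small, made-up unified diff fed through the two helpers above:
    lines = ['diff -r abc123 -r def456 foo.c',
             '--- a/foo.c', '+++ b/foo.c', '@@ -1,2 +1,2 @@',
             ' context', '-old', '+new']
    diffstatdata(lines)   # -> [('foo.c', 1, 1, False)]
    diffstat(lines)       # -> ' foo.c | 2 +-\n 1 files changed, ...'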
2799 2878 def diffstatui(*args, **kw):
2800 2879 '''like diffstat(), but yields 2-tuples of (output, label) for
2801 2880 ui.write()
2802 2881 '''
2803 2882
2804 2883 for line in diffstat(*args, **kw).splitlines():
2805 2884 if line and line[-1] in '+-':
2806 2885 name, graph = line.rsplit(' ', 1)
2807 2886 yield (name + ' ', '')
2808 2887 m = re.search(br'\++', graph)
2809 2888 if m:
2810 2889 yield (m.group(0), 'diffstat.inserted')
2811 2890 m = re.search(br'-+', graph)
2812 2891 if m:
2813 2892 yield (m.group(0), 'diffstat.deleted')
2814 2893 else:
2815 2894 yield (line, '')
2816 2895 yield ('\n', '')
@@ -1,261 +1,353 b''
1 1 Setup
2 2
3 3 $ cat <<EOF >> $HGRCPATH
4 4 > [ui]
5 5 > color = yes
6 6 > formatted = always
7 7 > paginate = never
8 8 > [color]
9 9 > mode = ansi
10 10 > EOF
11 11 $ hg init repo
12 12 $ cd repo
13 13 $ cat > a <<EOF
14 14 > c
15 15 > c
16 16 > a
17 17 > a
18 18 > b
19 19 > a
20 20 > a
21 21 > c
22 22 > c
23 23 > EOF
24 24 $ hg ci -Am adda
25 25 adding a
26 26 $ cat > a <<EOF
27 27 > c
28 28 > c
29 29 > a
30 30 > a
31 31 > dd
32 32 > a
33 33 > a
34 34 > c
35 35 > c
36 36 > EOF
37 37
38 38 default context
39 39
40 40 $ hg diff --nodates
41 41 \x1b[0;1mdiff -r cf9f4ba66af2 a\x1b[0m (esc)
42 42 \x1b[0;31;1m--- a/a\x1b[0m (esc)
43 43 \x1b[0;32;1m+++ b/a\x1b[0m (esc)
44 44 \x1b[0;35m@@ -2,7 +2,7 @@\x1b[0m (esc)
45 45 c
46 46 a
47 47 a
48 48 \x1b[0;31m-b\x1b[0m (esc)
49 49 \x1b[0;32m+dd\x1b[0m (esc)
50 50 a
51 51 a
52 52 c
53 53
54 54 (check that 'ui.color=yes' match '--color=auto')
55 55
56 56 $ hg diff --nodates --config ui.formatted=no
57 57 diff -r cf9f4ba66af2 a
58 58 --- a/a
59 59 +++ b/a
60 60 @@ -2,7 +2,7 @@
61 61 c
62 62 a
63 63 a
64 64 -b
65 65 +dd
66 66 a
67 67 a
68 68 c
69 69
70 70 (check that 'ui.color=no' disable color)
71 71
72 72 $ hg diff --nodates --config ui.formatted=yes --config ui.color=no
73 73 diff -r cf9f4ba66af2 a
74 74 --- a/a
75 75 +++ b/a
76 76 @@ -2,7 +2,7 @@
77 77 c
78 78 a
79 79 a
80 80 -b
81 81 +dd
82 82 a
83 83 a
84 84 c
85 85
86 86 (check that 'ui.color=always' force color)
87 87
88 88 $ hg diff --nodates --config ui.formatted=no --config ui.color=always
89 89 \x1b[0;1mdiff -r cf9f4ba66af2 a\x1b[0m (esc)
90 90 \x1b[0;31;1m--- a/a\x1b[0m (esc)
91 91 \x1b[0;32;1m+++ b/a\x1b[0m (esc)
92 92 \x1b[0;35m@@ -2,7 +2,7 @@\x1b[0m (esc)
93 93 c
94 94 a
95 95 a
96 96 \x1b[0;31m-b\x1b[0m (esc)
97 97 \x1b[0;32m+dd\x1b[0m (esc)
98 98 a
99 99 a
100 100 c
101 101
102 102 --unified=2
103 103
104 104 $ hg diff --nodates -U 2
105 105 \x1b[0;1mdiff -r cf9f4ba66af2 a\x1b[0m (esc)
106 106 \x1b[0;31;1m--- a/a\x1b[0m (esc)
107 107 \x1b[0;32;1m+++ b/a\x1b[0m (esc)
108 108 \x1b[0;35m@@ -3,5 +3,5 @@\x1b[0m (esc)
109 109 a
110 110 a
111 111 \x1b[0;31m-b\x1b[0m (esc)
112 112 \x1b[0;32m+dd\x1b[0m (esc)
113 113 a
114 114 a
115 115
116 116 diffstat
117 117
118 118 $ hg diff --stat
119 119 a | 2 \x1b[0;32m+\x1b[0m\x1b[0;31m-\x1b[0m (esc)
120 120 1 files changed, 1 insertions(+), 1 deletions(-)
121 121 $ cat <<EOF >> $HGRCPATH
122 122 > [extensions]
123 123 > record =
124 124 > [ui]
125 125 > interactive = true
126 126 > [diff]
127 127 > git = True
128 128 > EOF
129 129
130 130 #if execbit
131 131
132 132 record
133 133
134 134 $ chmod +x a
135 135 $ hg record -m moda a <<EOF
136 136 > y
137 137 > y
138 138 > EOF
139 139 \x1b[0;1mdiff --git a/a b/a\x1b[0m (esc)
140 140 \x1b[0;36;1mold mode 100644\x1b[0m (esc)
141 141 \x1b[0;36;1mnew mode 100755\x1b[0m (esc)
142 142 1 hunks, 1 lines changed
143 143 \x1b[0;33mexamine changes to 'a'? [Ynesfdaq?]\x1b[0m y (esc)
144 144
145 145 \x1b[0;35m@@ -2,7 +2,7 @@ c\x1b[0m (esc)
146 146 c
147 147 a
148 148 a
149 149 \x1b[0;31m-b\x1b[0m (esc)
150 150 \x1b[0;32m+dd\x1b[0m (esc)
151 151 a
152 152 a
153 153 c
154 154 \x1b[0;33mrecord this change to 'a'? [Ynesfdaq?]\x1b[0m y (esc)
155 155
156 156
157 157 $ echo "[extensions]" >> $HGRCPATH
158 158 $ echo "mq=" >> $HGRCPATH
159 159 $ hg rollback
160 160 repository tip rolled back to revision 0 (undo commit)
161 161 working directory now based on revision 0
162 162
163 163 qrecord
164 164
165 165 $ hg qrecord -m moda patch <<EOF
166 166 > y
167 167 > y
168 168 > EOF
169 169 \x1b[0;1mdiff --git a/a b/a\x1b[0m (esc)
170 170 \x1b[0;36;1mold mode 100644\x1b[0m (esc)
171 171 \x1b[0;36;1mnew mode 100755\x1b[0m (esc)
172 172 1 hunks, 1 lines changed
173 173 \x1b[0;33mexamine changes to 'a'? [Ynesfdaq?]\x1b[0m y (esc)
174 174
175 175 \x1b[0;35m@@ -2,7 +2,7 @@ c\x1b[0m (esc)
176 176 c
177 177 a
178 178 a
179 179 \x1b[0;31m-b\x1b[0m (esc)
180 180 \x1b[0;32m+dd\x1b[0m (esc)
181 181 a
182 182 a
183 183 c
184 184 \x1b[0;33mrecord this change to 'a'? [Ynesfdaq?]\x1b[0m y (esc)
185 185
186 186
187 187 $ hg qpop -a
188 188 popping patch
189 189 patch queue now empty
190 190
191 191 #endif
192 192
193 193 issue3712: test colorization of subrepo diff
194 194
195 195 $ hg init sub
196 196 $ echo b > sub/b
197 197 $ hg -R sub commit -Am 'create sub'
198 198 adding b
199 199 $ echo 'sub = sub' > .hgsub
200 200 $ hg add .hgsub
201 201 $ hg commit -m 'add subrepo sub'
202 202 $ echo aa >> a
203 203 $ echo bb >> sub/b
204 204
205 205 $ hg diff -S
206 206 \x1b[0;1mdiff --git a/a b/a\x1b[0m (esc)
207 207 \x1b[0;31;1m--- a/a\x1b[0m (esc)
208 208 \x1b[0;32;1m+++ b/a\x1b[0m (esc)
209 209 \x1b[0;35m@@ -7,3 +7,4 @@\x1b[0m (esc)
210 210 a
211 211 c
212 212 c
213 213 \x1b[0;32m+aa\x1b[0m (esc)
214 214 \x1b[0;1mdiff --git a/sub/b b/sub/b\x1b[0m (esc)
215 215 \x1b[0;31;1m--- a/sub/b\x1b[0m (esc)
216 216 \x1b[0;32;1m+++ b/sub/b\x1b[0m (esc)
217 217 \x1b[0;35m@@ -1,1 +1,2 @@\x1b[0m (esc)
218 218 b
219 219 \x1b[0;32m+bb\x1b[0m (esc)
220 220
221 221 test tabs
222 222
223 223 $ cat >> a <<EOF
224 224 > one tab
225 225 > two tabs
226 226 > end tab
227 227 > mid tab
228 228 > all tabs
229 229 > EOF
230 230 $ hg diff --nodates
231 231 \x1b[0;1mdiff --git a/a b/a\x1b[0m (esc)
232 232 \x1b[0;31;1m--- a/a\x1b[0m (esc)
233 233 \x1b[0;32;1m+++ b/a\x1b[0m (esc)
234 234 \x1b[0;35m@@ -7,3 +7,9 @@\x1b[0m (esc)
235 235 a
236 236 c
237 237 c
238 238 \x1b[0;32m+aa\x1b[0m (esc)
239 239 \x1b[0;32m+\x1b[0m \x1b[0;32mone tab\x1b[0m (esc)
240 240 \x1b[0;32m+\x1b[0m \x1b[0;32mtwo tabs\x1b[0m (esc)
241 241 \x1b[0;32m+end tab\x1b[0m\x1b[0;1;41m \x1b[0m (esc)
242 242 \x1b[0;32m+mid\x1b[0m \x1b[0;32mtab\x1b[0m (esc)
243 243 \x1b[0;32m+\x1b[0m \x1b[0;32mall\x1b[0m \x1b[0;32mtabs\x1b[0m\x1b[0;1;41m \x1b[0m (esc)
244 244 $ echo "[color]" >> $HGRCPATH
245 245 $ echo "diff.tab = bold magenta" >> $HGRCPATH
246 246 $ hg diff --nodates
247 247 \x1b[0;1mdiff --git a/a b/a\x1b[0m (esc)
248 248 \x1b[0;31;1m--- a/a\x1b[0m (esc)
249 249 \x1b[0;32;1m+++ b/a\x1b[0m (esc)
250 250 \x1b[0;35m@@ -7,3 +7,9 @@\x1b[0m (esc)
251 251 a
252 252 c
253 253 c
254 254 \x1b[0;32m+aa\x1b[0m (esc)
255 255 \x1b[0;32m+\x1b[0m\x1b[0;1;35m \x1b[0m\x1b[0;32mone tab\x1b[0m (esc)
256 256 \x1b[0;32m+\x1b[0m\x1b[0;1;35m \x1b[0m\x1b[0;32mtwo tabs\x1b[0m (esc)
257 257 \x1b[0;32m+end tab\x1b[0m\x1b[0;1;41m \x1b[0m (esc)
258 258 \x1b[0;32m+mid\x1b[0m\x1b[0;1;35m \x1b[0m\x1b[0;32mtab\x1b[0m (esc)
259 259 \x1b[0;32m+\x1b[0m\x1b[0;1;35m \x1b[0m\x1b[0;32mall\x1b[0m\x1b[0;1;35m \x1b[0m\x1b[0;32mtabs\x1b[0m\x1b[0;1;41m \x1b[0m (esc)
260 260
261 261 $ cd ..
262
263 test inline color diff
264
265 $ hg init inline
266 $ cd inline
267 $ cat > file1 << EOF
268 > this is the first line
269 > this is the second line
270 > third line starts with space
271 > + starts with a plus sign
272 >
273 > this line won't change
274 >
275 > two lines are going to
276 > be changed into three!
277 >
278 > three of those lines will
279 > collapse onto one
280 > (to see if it works)
281 > EOF
282 $ hg add file1
283 $ hg ci -m 'commit'
284 $ cat > file1 << EOF
285 > that is the first paragraph
286 > this is the second line
287 > third line starts with space
288 > - starts with a minus sign
289 >
290 > this line won't change
291 >
292 > two lines are going to
293 > (entirely magically,
294 > assuming this works)
295 > be changed into four!
296 >
297 > three of those lines have
298 > collapsed onto one
299 > EOF
300 $ hg diff --config experimental.worddiff=False --color=debug
301 [diff.diffline|diff --git a/file1 b/file1]
302 [diff.file_a|--- a/file1]
303 [diff.file_b|+++ b/file1]
304 [diff.hunk|@@ -1,13 +1,14 @@]
305 [diff.deleted|-this is the first line]
306 [diff.deleted|-this is the second line]
307 [diff.deleted|- third line starts with space]
308 [diff.deleted|-+ starts with a plus sign]
309 [diff.inserted|+that is the first paragraph]
310 [diff.inserted|+ this is the second line]
311 [diff.inserted|+third line starts with space]
312 [diff.inserted|+- starts with a minus sign]
313
314 this line won't change
315
316 two lines are going to
317 [diff.deleted|-be changed into three!]
318 [diff.inserted|+(entirely magically,]
319 [diff.inserted|+ assuming this works)]
320 [diff.inserted|+be changed into four!]
321
322 [diff.deleted|-three of those lines will]
323 [diff.deleted|-collapse onto one]
324 [diff.deleted|-(to see if it works)]
325 [diff.inserted|+three of those lines have]
326 [diff.inserted|+collapsed onto one]
327 $ hg diff --config experimental.worddiff=True --color=debug
328 [diff.diffline|diff --git a/file1 b/file1]
329 [diff.file_a|--- a/file1]
330 [diff.file_b|+++ b/file1]
331 [diff.hunk|@@ -1,13 +1,14 @@]
332 [diff.deleted|-this is the ][diff.deleted.highlight|first][diff.deleted| line]
333 [diff.deleted|-this is the second line]
334 [diff.deleted|-][diff.deleted.highlight| ][diff.deleted|third line starts with space]
335 [diff.deleted|-][diff.deleted.highlight|+][diff.deleted| starts with a ][diff.deleted.highlight|plus][diff.deleted| sign]
336 [diff.inserted|+that is the first paragraph]
337 [diff.inserted|+][diff.inserted.highlight| ][diff.inserted|this is the ][diff.inserted.highlight|second][diff.inserted| line]
338 [diff.inserted|+third line starts with space]
339 [diff.inserted|+][diff.inserted.highlight|-][diff.inserted| starts with a ][diff.inserted.highlight|minus][diff.inserted| sign]
340
341 this line won't change
342
343 two lines are going to
344 [diff.deleted|-be changed into ][diff.deleted.highlight|three][diff.deleted|!]
345 [diff.inserted|+(entirely magically,]
346 [diff.inserted|+ assuming this works)]
347 [diff.inserted|+be changed into ][diff.inserted.highlight|four][diff.inserted|!]
348
349 [diff.deleted|-three of those lines ][diff.deleted.highlight|will]
350 [diff.deleted|-][diff.deleted.highlight|collapse][diff.deleted| onto one]
351 [diff.deleted|-(to see if it works)]
352 [diff.inserted|+three of those lines ][diff.inserted.highlight|have]
353 [diff.inserted|+][diff.inserted.highlight|collapsed][diff.inserted| onto one]