# patch.py - patch file parsing routines
#
# Copyright 2006 Brendan Cully <brendan@kublai.com>
# Copyright 2007 Chris Mason <chris.mason@oracle.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import, print_function

import collections
import contextlib
import copy
import email
import errno
import hashlib
import os
import posixpath
import re
import shutil
import zlib

from .i18n import _
from .node import (
    hex,
    short,
)
from . import (
    copies,
    diffhelper,
    diffutil,
    encoding,
    error,
    mail,
    mdiff,
    pathutil,
    pycompat,
    scmutil,
    similar,
    util,
    vfs as vfsmod,
)
from .utils import (
    dateutil,
    procutil,
    stringutil,
)

stringio = util.stringio

gitre = re.compile(br'diff --git a/(.*) b/(.*)')
tabsplitter = re.compile(br'(\t+|[^\t]+)')
wordsplitter = re.compile(br'(\t+| +|[a-zA-Z0-9_\x80-\xff]+|'
                          b'[^ \ta-zA-Z0-9_\x80-\xff])')

PatchError = error.PatchError

# public functions

def split(stream):
    '''return an iterator of individual patches from a stream'''
    def isheader(line, inheader):
        if inheader and line.startswith((' ', '\t')):
            # continuation
            return True
        if line.startswith((' ', '-', '+')):
            # diff line - don't check for header pattern in there
            return False
        l = line.split(': ', 1)
        return len(l) == 2 and ' ' not in l[0]

    def chunk(lines):
        return stringio(''.join(lines))

    def hgsplit(stream, cur):
        inheader = True

        for line in stream:
            if not line.strip():
                inheader = False
            if not inheader and line.startswith('# HG changeset patch'):
                yield chunk(cur)
                cur = []
                inheader = True

            cur.append(line)

        if cur:
            yield chunk(cur)

    def mboxsplit(stream, cur):
        for line in stream:
            if line.startswith('From '):
                for c in split(chunk(cur[1:])):
                    yield c
                cur = []

            cur.append(line)

        if cur:
            for c in split(chunk(cur[1:])):
                yield c

    def mimesplit(stream, cur):
        def msgfp(m):
            fp = stringio()
            g = email.Generator.Generator(fp, mangle_from_=False)
            g.flatten(m)
            fp.seek(0)
            return fp

        for line in stream:
            cur.append(line)
        c = chunk(cur)

        m = mail.parse(c)
        if not m.is_multipart():
            yield msgfp(m)
        else:
            ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
            for part in m.walk():
                ct = part.get_content_type()
                if ct not in ok_types:
                    continue
                yield msgfp(part)

    def headersplit(stream, cur):
        inheader = False

        for line in stream:
            if not inheader and isheader(line, inheader):
                yield chunk(cur)
                cur = []
                inheader = True
            if inheader and not isheader(line, inheader):
                inheader = False

            cur.append(line)

        if cur:
            yield chunk(cur)

    def remainder(cur):
        yield chunk(cur)

    class fiter(object):
        def __init__(self, fp):
            self.fp = fp

        def __iter__(self):
            return self

        def next(self):
            l = self.fp.readline()
            if not l:
                raise StopIteration
            return l

        __next__ = next

    inheader = False
    cur = []

    mimeheaders = ['content-type']

    if not util.safehasattr(stream, 'next'):
        # http responses, for example, have readline but not next
        stream = fiter(stream)

    for line in stream:
        cur.append(line)
        if line.startswith('# HG changeset patch'):
            return hgsplit(stream, cur)
        elif line.startswith('From '):
            return mboxsplit(stream, cur)
        elif isheader(line, inheader):
            inheader = True
            if line.split(':', 1)[0].lower() in mimeheaders:
                # let email parser handle this
                return mimesplit(stream, cur)
        elif line.startswith('--- ') and inheader:
            # No evil headers seen by diff start, split by hand
            return headersplit(stream, cur)
        # Not enough info, keep reading

    # if we are here, we have a very plain patch
    return remainder(cur)
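
# Illustrative usage sketch (not part of the original module): splitting an
# mbox or a concatenated "hg export" stream into individual patches. 'fp' is
# a hypothetical open binary file object; every item yielded by split() is a
# stringio holding one complete patch.
#
#   with open('series.mbox', 'rb') as fp:
#       for i, single in enumerate(split(fp)):
#           with open('patch-%d.diff' % i, 'wb') as out:
#               out.write(single.read())
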
## Some facility for extensible patch parsing:
# list of pairs ("header to match", "data key")
patchheadermap = [('Date', 'date'),
                  ('Branch', 'branch'),
                  ('Node ID', 'nodeid'),
                 ]

@contextlib.contextmanager
def extract(ui, fileobj):
    '''extract patch from data read from fileobj.

    patch can be a normal patch or contained in an email message.

    return a dictionary. Standard keys are:
      - filename,
      - message,
      - user,
      - date,
      - branch,
      - node,
      - p1,
      - p2.
    Any item can be missing from the dictionary. If filename is missing,
    fileobj did not contain a patch. Caller must unlink filename when done.'''

    fd, tmpname = pycompat.mkstemp(prefix='hg-patch-')
    tmpfp = os.fdopen(fd, r'wb')
    try:
        yield _extract(ui, fileobj, tmpname, tmpfp)
    finally:
        tmpfp.close()
        os.unlink(tmpname)

def _extract(ui, fileobj, tmpname, tmpfp):

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(br'^(?:Index:[ \t]|diff[ \t]-|RCS file: |'
                        br'retrieving revision [0-9]+(\.[0-9]+)*$|'
                        br'---[ \t].*?^\+\+\+[ \t]|'
                        br'\*\*\*[ \t].*?^---[ \t])',
                        re.MULTILINE | re.DOTALL)

    data = {}

    msg = mail.parse(fileobj)

    subject = msg[r'Subject'] and mail.headdecode(msg[r'Subject'])
    data['user'] = msg[r'From'] and mail.headdecode(msg[r'From'])
    if not subject and not data['user']:
        # Not an email, restore parsed headers if any
        subject = '\n'.join(': '.join(map(encoding.strtolocal, h))
                            for h in msg.items()) + '\n'

    # should try to parse msg['Date']
    parents = []

    if subject:
        if subject.startswith('[PATCH'):
            pend = subject.find(']')
            if pend >= 0:
                subject = subject[pend + 1:].lstrip()
        subject = re.sub(br'\n[ \t]+', ' ', subject)
        ui.debug('Subject: %s\n' % subject)
    if data['user']:
        ui.debug('From: %s\n' % data['user'])
    diffs_seen = 0
    ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
    message = ''
    for part in msg.walk():
        content_type = pycompat.bytestr(part.get_content_type())
        ui.debug('Content-Type: %s\n' % content_type)
        if content_type not in ok_types:
            continue
        payload = part.get_payload(decode=True)
        m = diffre.search(payload)
        if m:
            hgpatch = False
            hgpatchheader = False
            ignoretext = False

            ui.debug('found patch at byte %d\n' % m.start(0))
            diffs_seen += 1
            cfp = stringio()
            for line in payload[:m.start(0)].splitlines():
                if line.startswith('# HG changeset patch') and not hgpatch:
                    ui.debug('patch generated by hg export\n')
                    hgpatch = True
                    hgpatchheader = True
                    # drop earlier commit message content
                    cfp.seek(0)
                    cfp.truncate()
                    subject = None
                elif hgpatchheader:
                    if line.startswith('# User '):
                        data['user'] = line[7:]
                        ui.debug('From: %s\n' % data['user'])
                    elif line.startswith("# Parent "):
                        parents.append(line[9:].lstrip())
                    elif line.startswith("# "):
                        for header, key in patchheadermap:
                            prefix = '# %s ' % header
                            if line.startswith(prefix):
                                data[key] = line[len(prefix):]
                    else:
                        hgpatchheader = False
                elif line == '---':
                    ignoretext = True
                if not hgpatchheader and not ignoretext:
                    cfp.write(line)
                    cfp.write('\n')
            message = cfp.getvalue()
            if tmpfp:
                tmpfp.write(payload)
                if not payload.endswith('\n'):
                    tmpfp.write('\n')
        elif not diffs_seen and message and content_type == 'text/plain':
            message += '\n' + payload

    if subject and not message.startswith(subject):
        message = '%s\n%s' % (subject, message)
    data['message'] = message
    tmpfp.close()
    if parents:
        data['p1'] = parents.pop(0)
        if parents:
            data['p2'] = parents.pop(0)

    if diffs_seen:
        data['filename'] = tmpname

    return data
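
# Illustrative usage sketch (assumption, not part of the original module):
# extract() is a context manager, so the temporary file holding the diff is
# unlinked automatically when the block exits. 'ui' and 'fileobj' are assumed
# to be a ui instance and an open binary file.
#
#   with extract(ui, fileobj) as data:
#       if 'filename' in data:
#           # the diff itself lives in data['filename'] until the block exits
#           ui.write('patch written to %s\n' % data['filename'])
#       ui.write('message: %s\n' % data.get('message', ''))
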
class patchmeta(object):
    """Patched file metadata

    'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
    or COPY. 'path' is patched file path. 'oldpath' is set to the
    origin file when 'op' is either COPY or RENAME, None otherwise. If
    file mode is changed, 'mode' is a tuple (islink, isexec) where
    'islink' is True if the file is a symlink and 'isexec' is True if
    the file is executable. Otherwise, 'mode' is None.
    """
    def __init__(self, path):
        self.path = path
        self.oldpath = None
        self.mode = None
        self.op = 'MODIFY'
        self.binary = False

    def setmode(self, mode):
        islink = mode & 0o20000
        isexec = mode & 0o100
        self.mode = (islink, isexec)

    def copy(self):
        other = patchmeta(self.path)
        other.oldpath = self.oldpath
        other.mode = self.mode
        other.op = self.op
        other.binary = self.binary
        return other

    def _ispatchinga(self, afile):
        if afile == '/dev/null':
            return self.op == 'ADD'
        return afile == 'a/' + (self.oldpath or self.path)

    def _ispatchingb(self, bfile):
        if bfile == '/dev/null':
            return self.op == 'DELETE'
        return bfile == 'b/' + self.path

    def ispatching(self, afile, bfile):
        return self._ispatchinga(afile) and self._ispatchingb(bfile)

    def __repr__(self):
        return "<patchmeta %s %r>" % (self.op, self.path)
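
# Illustrative sketch (not part of the original module): how a git
# "new file mode 100755" line maps onto patchmeta. The octal mode is fed to
# setmode(), which keeps only the (islink, isexec) bits rather than the raw
# permissions.
#
#   gp = patchmeta(b'tools/run.sh')
#   gp.op = 'ADD'
#   gp.setmode(int(b'100755', 8))
#   # gp.mode == (0, 64): mode & 0o20000 (symlink bit) is unset,
#   # mode & 0o100 (exec bit) is set
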
def readgitpatch(lr):
    """extract git-style metadata about patches from <patchname>"""

    # Filter patch for git information
    gp = None
    gitpatches = []
    for line in lr:
        line = line.rstrip(' \r\n')
        if line.startswith('diff --git a/'):
            m = gitre.match(line)
            if m:
                if gp:
                    gitpatches.append(gp)
                dst = m.group(2)
                gp = patchmeta(dst)
        elif gp:
            if line.startswith('--- '):
                gitpatches.append(gp)
                gp = None
                continue
            if line.startswith('rename from '):
                gp.op = 'RENAME'
                gp.oldpath = line[12:]
            elif line.startswith('rename to '):
                gp.path = line[10:]
            elif line.startswith('copy from '):
                gp.op = 'COPY'
                gp.oldpath = line[10:]
            elif line.startswith('copy to '):
                gp.path = line[8:]
            elif line.startswith('deleted file'):
                gp.op = 'DELETE'
            elif line.startswith('new file mode '):
                gp.op = 'ADD'
                gp.setmode(int(line[-6:], 8))
            elif line.startswith('new mode '):
                gp.setmode(int(line[-6:], 8))
            elif line.startswith('GIT binary patch'):
                gp.binary = True
    if gp:
        gitpatches.append(gp)

    return gitpatches
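
# Illustrative sketch (not part of the original module): readgitpatch() only
# collects per-file metadata (operation, copy/rename source, mode, binary
# flag); the hunks themselves are parsed elsewhere. A hypothetical rename
# comes back as:
#
#   lines = [b'diff --git a/old.txt b/new.txt',
#            b'rename from old.txt',
#            b'rename to new.txt']
#   [gp] = readgitpatch(lines)
#   # gp.op == 'RENAME', gp.oldpath == b'old.txt', gp.path == b'new.txt'
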
class linereader(object):
    # simple class to allow pushing lines back into the input stream
    def __init__(self, fp):
        self.fp = fp
        self.buf = []

    def push(self, line):
        if line is not None:
            self.buf.append(line)

    def readline(self):
        if self.buf:
            l = self.buf[0]
            del self.buf[0]
            return l
        return self.fp.readline()

    def __iter__(self):
        return iter(self.readline, '')
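
# Illustrative sketch (not part of the original module): push() lets a parser
# peek at a line and put it back, so the next readline() (or iteration step)
# returns it again.
#
#   lr = linereader(stringio(b'first\nsecond\n'))
#   line = lr.readline()     # b'first\n'
#   lr.push(line)            # undo the read
#   lr.readline()            # b'first\n' again
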
class abstractbackend(object):
    def __init__(self, ui):
        self.ui = ui

    def getfile(self, fname):
        """Return target file data and flags as a (data, (islink,
        isexec)) tuple. Data is None if file is missing/deleted.
        """
        raise NotImplementedError

    def setfile(self, fname, data, mode, copysource):
        """Write data to target file fname and set its mode. mode is a
        (islink, isexec) tuple. If data is None, the file content should
        be left unchanged. If the file is modified after being copied,
        copysource is set to the original file name.
        """
        raise NotImplementedError

    def unlink(self, fname):
        """Unlink target file."""
        raise NotImplementedError

    def writerej(self, fname, failed, total, lines):
        """Write rejected lines for fname. failed is the number of hunks
        which failed to apply and total the total number of hunks for this
        file.
        """

    def exists(self, fname):
        raise NotImplementedError

    def close(self):
        raise NotImplementedError

class fsbackend(abstractbackend):
    def __init__(self, ui, basedir):
        super(fsbackend, self).__init__(ui)
        self.opener = vfsmod.vfs(basedir)

    def getfile(self, fname):
        if self.opener.islink(fname):
            return (self.opener.readlink(fname), (True, False))

        isexec = False
        try:
            isexec = self.opener.lstat(fname).st_mode & 0o100 != 0
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
        try:
            return (self.opener.read(fname), (False, isexec))
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            return None, None

    def setfile(self, fname, data, mode, copysource):
        islink, isexec = mode
        if data is None:
            self.opener.setflags(fname, islink, isexec)
            return
        if islink:
            self.opener.symlink(data, fname)
        else:
            self.opener.write(fname, data)
            if isexec:
                self.opener.setflags(fname, False, True)

    def unlink(self, fname):
        rmdir = self.ui.configbool('experimental', 'removeemptydirs')
        self.opener.unlinkpath(fname, ignoremissing=True, rmdir=rmdir)

    def writerej(self, fname, failed, total, lines):
        fname = fname + ".rej"
        self.ui.warn(
            _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
            (failed, total, fname))
        fp = self.opener(fname, 'w')
        fp.writelines(lines)
        fp.close()

    def exists(self, fname):
        return self.opener.lexists(fname)
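
# Illustrative sketch (not part of the original module): the backend split
# lets the same hunk-applying code write to the plain filesystem, to the
# working directory (workingbackend below) or to an in-memory store
# (repobackend below). A bare fsbackend round-trip, assuming 'ui' is a ui
# instance and b'/tmp/wd' is a scratch directory:
#
#   backend = fsbackend(ui, b'/tmp/wd')
#   backend.setfile(b'a.txt', b'hello\n', (False, False), None)
#   data, (islink, isexec) = backend.getfile(b'a.txt')
#   # data == b'hello\n', islink is False, isexec is False
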
class workingbackend(fsbackend):
    def __init__(self, ui, repo, similarity):
        super(workingbackend, self).__init__(ui, repo.root)
        self.repo = repo
        self.similarity = similarity
        self.removed = set()
        self.changed = set()
        self.copied = []

    def _checkknown(self, fname):
        if self.repo.dirstate[fname] == '?' and self.exists(fname):
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def setfile(self, fname, data, mode, copysource):
        self._checkknown(fname)
        super(workingbackend, self).setfile(fname, data, mode, copysource)
        if copysource is not None:
            self.copied.append((copysource, fname))
        self.changed.add(fname)

    def unlink(self, fname):
        self._checkknown(fname)
        super(workingbackend, self).unlink(fname)
        self.removed.add(fname)
        self.changed.add(fname)

    def close(self):
        wctx = self.repo[None]
        changed = set(self.changed)
        for src, dst in self.copied:
            scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
        if self.removed:
            wctx.forget(sorted(self.removed))
            for f in self.removed:
                if f not in self.repo.dirstate:
                    # File was deleted and no longer belongs to the
                    # dirstate, it was probably marked added then
                    # deleted, and should not be considered by
                    # marktouched().
                    changed.discard(f)
        if changed:
            scmutil.marktouched(self.repo, changed, self.similarity)
        return sorted(self.changed)

class filestore(object):
    def __init__(self, maxsize=None):
        self.opener = None
        self.files = {}
        self.created = 0
        self.maxsize = maxsize
        if self.maxsize is None:
            self.maxsize = 4*(2**20)
        self.size = 0
        self.data = {}

    def setfile(self, fname, data, mode, copied=None):
        if self.maxsize < 0 or (len(data) + self.size) <= self.maxsize:
            self.data[fname] = (data, mode, copied)
            self.size += len(data)
        else:
            if self.opener is None:
                root = pycompat.mkdtemp(prefix='hg-patch-')
                self.opener = vfsmod.vfs(root)
            # Avoid filename issues with these simple names
            fn = '%d' % self.created
            self.opener.write(fn, data)
            self.created += 1
            self.files[fname] = (fn, mode, copied)

    def getfile(self, fname):
        if fname in self.data:
            return self.data[fname]
        if not self.opener or fname not in self.files:
            return None, None, None
        fn, mode, copied = self.files[fname]
        return self.opener.read(fn), mode, copied

    def close(self):
        if self.opener:
            shutil.rmtree(self.opener.base)
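
# Illustrative sketch (not part of the original module): filestore keeps small
# file contents in memory and spills to a temporary directory once 'maxsize'
# (4 MiB by default) is exceeded; getfile() hides the difference.
#
#   store = filestore(maxsize=10)
#   store.setfile(b'small', b'tiny', (False, False))    # kept in self.data
#   store.setfile(b'big', b'x' * 100, (False, False))   # spilled to disk
#   data, mode, copied = store.getfile(b'big')
#   store.close()                                       # removes the tempdir
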
class repobackend(abstractbackend):
    def __init__(self, ui, repo, ctx, store):
        super(repobackend, self).__init__(ui)
        self.repo = repo
        self.ctx = ctx
        self.store = store
        self.changed = set()
        self.removed = set()
        self.copied = {}

    def _checkknown(self, fname):
        if fname not in self.ctx:
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def getfile(self, fname):
        try:
            fctx = self.ctx[fname]
        except error.LookupError:
            return None, None
        flags = fctx.flags()
        return fctx.data(), ('l' in flags, 'x' in flags)

    def setfile(self, fname, data, mode, copysource):
        if copysource:
            self._checkknown(copysource)
        if data is None:
            data = self.ctx[fname].data()
        self.store.setfile(fname, data, mode, copysource)
        self.changed.add(fname)
        if copysource:
            self.copied[fname] = copysource

    def unlink(self, fname):
        self._checkknown(fname)
        self.removed.add(fname)

    def exists(self, fname):
        return fname in self.ctx

    def close(self):
        return self.changed | self.removed
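
# Illustrative note (assumption, not part of the original module): repobackend
# is the backend used when importing without touching the working directory
# ('hg import --bypass'): originals are read from a changectx, results are
# accumulated in a filestore, and close() reports which files were touched.
#
#   store = filestore()
#   backend = repobackend(ui, repo, repo[b'.'], store)
#   backend.setfile(b'a.txt', b'new\n', (False, False), None)
#   backend.close()    # {b'a.txt'}
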
# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
unidesc = re.compile('@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
contextdesc = re.compile('(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
eolmodes = ['strict', 'crlf', 'lf', 'auto']

class patchfile(object):
    def __init__(self, ui, gp, backend, store, eolmode='strict'):
        self.fname = gp.path
        self.eolmode = eolmode
        self.eol = None
        self.backend = backend
        self.ui = ui
        self.lines = []
        self.exists = False
        self.missing = True
        self.mode = gp.mode
        self.copysource = gp.oldpath
        self.create = gp.op in ('ADD', 'COPY', 'RENAME')
        self.remove = gp.op == 'DELETE'
        if self.copysource is None:
            data, mode = backend.getfile(self.fname)
        else:
            data, mode = store.getfile(self.copysource)[:2]
        if data is not None:
            self.exists = self.copysource is None or backend.exists(self.fname)
            self.missing = False
            if data:
                self.lines = mdiff.splitnewlines(data)
            if self.mode is None:
                self.mode = mode
            if self.lines:
                # Normalize line endings
                if self.lines[0].endswith('\r\n'):
                    self.eol = '\r\n'
                elif self.lines[0].endswith('\n'):
                    self.eol = '\n'
                if eolmode != 'strict':
                    nlines = []
                    for l in self.lines:
                        if l.endswith('\r\n'):
                            l = l[:-2] + '\n'
                        nlines.append(l)
                    self.lines = nlines
        else:
            if self.create:
                self.missing = False
            if self.mode is None:
                self.mode = (False, False)
        if self.missing:
            self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
            self.ui.warn(_("(use '--prefix' to apply patch relative to the "
                           "current directory)\n"))

        self.hash = {}
        self.dirty = 0
        self.offset = 0
        self.skew = 0
        self.rej = []
        self.fileprinted = False
        self.printfile(False)
        self.hunks = 0

    def writelines(self, fname, lines, mode):
        if self.eolmode == 'auto':
            eol = self.eol
        elif self.eolmode == 'crlf':
            eol = '\r\n'
        else:
            eol = '\n'

        if self.eolmode != 'strict' and eol and eol != '\n':
            rawlines = []
            for l in lines:
                if l and l.endswith('\n'):
                    l = l[:-1] + eol
                rawlines.append(l)
            lines = rawlines

        self.backend.setfile(fname, ''.join(lines), mode, self.copysource)

    def printfile(self, warn):
        if self.fileprinted:
            return
        if warn or self.ui.verbose:
            self.fileprinted = True
        s = _("patching file %s\n") % self.fname
        if warn:
            self.ui.warn(s)
        else:
            self.ui.note(s)

    def findlines(self, l, linenum):
        # looks through the hash and finds candidate lines. The
        # result is a list of line numbers sorted based on distance
        # from linenum

        cand = self.hash.get(l, [])
        if len(cand) > 1:
            # resort our list of potentials forward then back.
            cand.sort(key=lambda x: abs(x - linenum))
        return cand

    def write_rej(self):
        # our rejects are a little different from patch(1). This always
        # creates rejects in the same form as the original patch. A file
        # header is inserted so that you can run the reject through patch again
        # without having to type the filename.
        if not self.rej:
            return
        base = os.path.basename(self.fname)
        lines = ["--- %s\n+++ %s\n" % (base, base)]
        for x in self.rej:
            for l in x.hunk:
                lines.append(l)
                if l[-1:] != '\n':
                    lines.append("\n\ No newline at end of file\n")
        self.backend.writerej(self.fname, len(self.rej), self.hunks, lines)

    def apply(self, h):
        if not h.complete():
            raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
                             (h.number, h.desc, len(h.a), h.lena, len(h.b),
                              h.lenb))

        self.hunks += 1

        if self.missing:
            self.rej.append(h)
            return -1

        if self.exists and self.create:
            if self.copysource:
                self.ui.warn(_("cannot create %s: destination already "
                               "exists\n") % self.fname)
            else:
                self.ui.warn(_("file %s already exists\n") % self.fname)
            self.rej.append(h)
            return -1

        if isinstance(h, binhunk):
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                l = h.new(self.lines)
                self.lines[:] = l
                self.offset += len(l)
                self.dirty = True
            return 0

        horig = h
        if (self.eolmode in ('crlf', 'lf')
            or self.eolmode == 'auto' and self.eol):
            # If new eols are going to be normalized, then normalize
            # hunk data before patching. Otherwise, preserve input
            # line-endings.
            h = h.getnormalized()

        # fast case first, no offsets, no fuzz
        old, oldstart, new, newstart = h.fuzzit(0, False)
        oldstart += self.offset
        orig_start = oldstart
        # if there's skew we want to emit the "(offset %d lines)" even
        # when the hunk cleanly applies at start + skew, so skip the
        # fast case code
        if self.skew == 0 and diffhelper.testhunk(old, self.lines, oldstart):
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                self.lines[oldstart:oldstart + len(old)] = new
                self.offset += len(new) - len(old)
                self.dirty = True
            return 0

        # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
        self.hash = {}
        for x, s in enumerate(self.lines):
            self.hash.setdefault(s, []).append(x)

        for fuzzlen in xrange(self.ui.configint("patch", "fuzz") + 1):
            for toponly in [True, False]:
                old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
                oldstart = oldstart + self.offset + self.skew
                oldstart = min(oldstart, len(self.lines))
                if old:
                    cand = self.findlines(old[0][1:], oldstart)
                else:
                    # Only adding lines with no or fuzzed context, just
                    # take the skew into account
                    cand = [oldstart]

                for l in cand:
                    if not old or diffhelper.testhunk(old, self.lines, l):
                        self.lines[l : l + len(old)] = new
                        self.offset += len(new) - len(old)
                        self.skew = l - orig_start
                        self.dirty = True
                        offset = l - orig_start - fuzzlen
                        if fuzzlen:
                            msg = _("Hunk #%d succeeded at %d "
                                    "with fuzz %d "
                                    "(offset %d lines).\n")
                            self.printfile(True)
                            self.ui.warn(msg %
                                (h.number, l + 1, fuzzlen, offset))
                        else:
                            msg = _("Hunk #%d succeeded at %d "
                                    "(offset %d lines).\n")
                            self.ui.note(msg % (h.number, l + 1, offset))
                        return fuzzlen
        self.printfile(True)
        self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
        self.rej.append(horig)
        return -1

    def close(self):
        if self.dirty:
            self.writelines(self.fname, self.lines, self.mode)
        self.write_rej()
        return len(self.rej)
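
# Illustrative note (assumption, not part of the original module): apply()
# returns -1 when the hunk is rejected (it is then queued for the .rej file
# written by close()) and otherwise the amount of fuzz that was needed, 0 for
# a clean application.
#
#   pf = patchfile(ui, gp, backend, store, eolmode='strict')
#   for h in hunks:              # hunks previously parsed for this file
#       if pf.apply(h) < 0:
#           ui.warn(b'hunk rejected\n')
#   rejects = pf.close()         # writes the result and any .rej file
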
class header(object):
    """patch header
    """
    diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
    diff_re = re.compile('diff -r .* (.*)$')
    allhunks_re = re.compile('(?:index|deleted file) ')
    pretty_re = re.compile('(?:new file|deleted file) ')
    special_re = re.compile('(?:index|deleted|copy|rename) ')
    newfile_re = re.compile('(?:new file)')

    def __init__(self, header):
        self.header = header
        self.hunks = []

    def binary(self):
        return any(h.startswith('index ') for h in self.header)

    def pretty(self, fp):
        for h in self.header:
            if h.startswith('index '):
                fp.write(_('this modifies a binary file (all or nothing)\n'))
                break
            if self.pretty_re.match(h):
                fp.write(h)
                if self.binary():
                    fp.write(_('this is a binary file\n'))
                break
            if h.startswith('---'):
                fp.write(_('%d hunks, %d lines changed\n') %
                         (len(self.hunks),
                          sum([max(h.added, h.removed) for h in self.hunks])))
                break
            fp.write(h)

    def write(self, fp):
        fp.write(''.join(self.header))

    def allhunks(self):
        return any(self.allhunks_re.match(h) for h in self.header)

    def files(self):
        match = self.diffgit_re.match(self.header[0])
        if match:
            fromfile, tofile = match.groups()
            if fromfile == tofile:
                return [fromfile]
            return [fromfile, tofile]
        else:
            return self.diff_re.match(self.header[0]).groups()

    def filename(self):
        return self.files()[-1]

    def __repr__(self):
        return '<header %s>' % (' '.join(map(repr, self.files())))

    def isnewfile(self):
        return any(self.newfile_re.match(h) for h in self.header)

    def special(self):
        # Special files are shown only at the header level and not at the hunk
        # level; for example, a file that has been deleted is a special file.
        # The user cannot change the content of the operation: in the case of
        # a deleted file, the user has to take the whole deletion or leave it,
        # they cannot take only part of it.
        # Newly added files are special if they are empty; they are not special
        # if they have some content, as we want to be able to change it.
        nocontent = len(self.header) == 2
        emptynewfile = self.isnewfile() and nocontent
        return emptynewfile or \
                any(self.special_re.match(h) for h in self.header)
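
# Illustrative sketch (not part of the original module): header.files() pulls
# the file name(s) out of the first header line, handling both git-style and
# plain "diff -r" headers.
#
#   h = header([b'diff --git a/frotz b/nitfol\n', b'--- a/frotz\n'])
#   h.files()       # [b'frotz', b'nitfol']
#   h.filename()    # b'nitfol'
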
class recordhunk(object):
    """patch hunk

    XXX shouldn't we merge this with the other hunk class?
    """
    def __init__(self, header, fromline, toline, proc, before, hunk, after,
                 maxcontext=None):
        def trimcontext(lines, reverse=False):
            if maxcontext is not None:
                delta = len(lines) - maxcontext
                if delta > 0:
                    if reverse:
                        return delta, lines[delta:]
                    else:
                        return delta, lines[:maxcontext]
            return 0, lines

        self.header = header
        trimedbefore, self.before = trimcontext(before, True)
        self.fromline = fromline + trimedbefore
        self.toline = toline + trimedbefore
        _trimedafter, self.after = trimcontext(after, False)
        self.proc = proc
        self.hunk = hunk
        self.added, self.removed = self.countchanges(self.hunk)

    def __eq__(self, v):
        if not isinstance(v, recordhunk):
            return False

        return ((v.hunk == self.hunk) and
                (v.proc == self.proc) and
                (self.fromline == v.fromline) and
                (self.header.files() == v.header.files()))

    def __hash__(self):
        return hash((tuple(self.hunk),
            tuple(self.header.files()),
            self.fromline,
            self.proc))

    def countchanges(self, hunk):
        """hunk -> (n+,n-)"""
        add = len([h for h in hunk if h.startswith('+')])
        rem = len([h for h in hunk if h.startswith('-')])
        return add, rem

    def reversehunk(self):
        """return another recordhunk which is the reverse of the hunk

        If this hunk is diff(A, B), the returned hunk is diff(B, A). To do
        that, swap fromline/toline and +/- signs while keeping other things
        unchanged.
        """
        m = {'+': '-', '-': '+', '\\': '\\'}
        hunk = ['%s%s' % (m[l[0:1]], l[1:]) for l in self.hunk]
        return recordhunk(self.header, self.toline, self.fromline, self.proc,
                          self.before, hunk, self.after)

    def write(self, fp):
        delta = len(self.before) + len(self.after)
        if self.after and self.after[-1] == '\\ No newline at end of file\n':
            delta -= 1
        fromlen = delta + self.removed
        tolen = delta + self.added
        fp.write('@@ -%d,%d +%d,%d @@%s\n' %
                 (self.fromline, fromlen, self.toline, tolen,
                  self.proc and (' ' + self.proc)))
        fp.write(''.join(self.before + self.hunk + self.after))

    pretty = write

    def filename(self):
        return self.header.filename()

    def __repr__(self):
        return '<hunk %r@%d>' % (self.filename(), self.fromline)
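
# Illustrative sketch (not part of the original module): countchanges() and
# reversehunk() work purely on the '+'/'-' prefixes of the hunk lines, so
# reverting an interactively selected hunk is a sign flip plus swapped
# from/to line numbers.
#
#   h = header([b'diff -r 000000000000 a.txt\n'])
#   rh = recordhunk(h, 1, 1, None, [], [b'-old\n', b'+new\n'], [])
#   rh.added, rh.removed      # (1, 1)
#   rev = rh.reversehunk()
#   rev.hunk                  # [b'+old\n', b'-new\n']
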
def getmessages():
    return {
        'multiple': {
            'apply': _("apply change %d/%d to '%s'?"),
            'discard': _("discard change %d/%d to '%s'?"),
            'record': _("record change %d/%d to '%s'?"),
        },
        'single': {
            'apply': _("apply this change to '%s'?"),
            'discard': _("discard this change to '%s'?"),
            'record': _("record this change to '%s'?"),
        },
        'help': {
            'apply': _('[Ynesfdaq?]'
                       '$$ &Yes, apply this change'
                       '$$ &No, skip this change'
                       '$$ &Edit this change manually'
                       '$$ &Skip remaining changes to this file'
                       '$$ Apply remaining changes to this &file'
                       '$$ &Done, skip remaining changes and files'
                       '$$ Apply &all changes to all remaining files'
                       '$$ &Quit, applying no changes'
                       '$$ &? (display help)'),
            'discard': _('[Ynesfdaq?]'
                         '$$ &Yes, discard this change'
                         '$$ &No, skip this change'
                         '$$ &Edit this change manually'
                         '$$ &Skip remaining changes to this file'
                         '$$ Discard remaining changes to this &file'
                         '$$ &Done, skip remaining changes and files'
                         '$$ Discard &all changes to all remaining files'
                         '$$ &Quit, discarding no changes'
                         '$$ &? (display help)'),
            'record': _('[Ynesfdaq?]'
                        '$$ &Yes, record this change'
                        '$$ &No, skip this change'
                        '$$ &Edit this change manually'
                        '$$ &Skip remaining changes to this file'
                        '$$ Record remaining changes to this &file'
                        '$$ &Done, skip remaining changes and files'
                        '$$ Record &all changes to all remaining files'
                        '$$ &Quit, recording no changes'
                        '$$ &? (display help)'),
        }
    }

def filterpatch(ui, headers, operation=None):
    """Interactively filter patch chunks into applied-only chunks"""
    messages = getmessages()

    if operation is None:
        operation = 'record'

    def prompt(skipfile, skipall, query, chunk):
        """prompt query, and process base inputs

        - y/n for the rest of file
        - y/n for the rest
        - ? (help)
        - q (quit)

        Return True/False and possibly updated skipfile and skipall.
        """
        newpatches = None
        if skipall is not None:
            return skipall, skipfile, skipall, newpatches
        if skipfile is not None:
            return skipfile, skipfile, skipall, newpatches
        while True:
            resps = messages['help'][operation]
            r = ui.promptchoice("%s %s" % (query, resps))
            ui.write("\n")
            if r == 8: # ?
                for c, t in ui.extractchoices(resps)[1]:
                    ui.write('%s - %s\n' % (c, encoding.lower(t)))
                continue
            elif r == 0: # yes
                ret = True
            elif r == 1: # no
                ret = False
            elif r == 2: # Edit patch
                if chunk is None:
                    ui.write(_('cannot edit patch for whole file'))
                    ui.write("\n")
                    continue
                if chunk.header.binary():
                    ui.write(_('cannot edit patch for binary file'))
                    ui.write("\n")
                    continue
                # Patch comment based on the Git one (based on comment at end of
                # https://mercurial-scm.org/wiki/RecordExtension)
                phelp = '---' + _("""
To remove '-' lines, make them ' ' lines (context).
To remove '+' lines, delete them.
Lines starting with # will be removed from the patch.

If the patch applies cleanly, the edited hunk will immediately be
added to the record list. If it does not apply cleanly, a rejects
file will be generated: you can use that when you try again. If
all lines of the hunk are removed, then the edit is aborted and
the hunk is left unchanged.
""")
                (patchfd, patchfn) = pycompat.mkstemp(prefix="hg-editor-",
                                                      suffix=".diff")
                ncpatchfp = None
                try:
                    # Write the initial patch
                    f = util.nativeeolwriter(os.fdopen(patchfd, r'wb'))
                    chunk.header.write(f)
                    chunk.write(f)
                    f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
                    f.close()
                    # Start the editor and wait for it to complete
                    editor = ui.geteditor()
                    ret = ui.system("%s \"%s\"" % (editor, patchfn),
                                    environ={'HGUSER': ui.username()},
                                    blockedtag='filterpatch')
                    if ret != 0:
                        ui.warn(_("editor exited with exit code %d\n") % ret)
                        continue
                    # Remove comment lines
                    patchfp = open(patchfn, r'rb')
                    ncpatchfp = stringio()
                    for line in util.iterfile(patchfp):
                        line = util.fromnativeeol(line)
                        if not line.startswith('#'):
                            ncpatchfp.write(line)
                    patchfp.close()
                    ncpatchfp.seek(0)
                    newpatches = parsepatch(ncpatchfp)
                finally:
                    os.unlink(patchfn)
                    del ncpatchfp
                # Signal that the chunk shouldn't be applied as-is, but
                # provide the new patch to be used instead.
                ret = False
            elif r == 3: # Skip
                ret = skipfile = False
            elif r == 4: # file (Record remaining)
                ret = skipfile = True
            elif r == 5: # done, skip remaining
                ret = skipall = False
            elif r == 6: # all
                ret = skipall = True
            elif r == 7: # quit
                raise error.Abort(_('user quit'))
            return ret, skipfile, skipall, newpatches

    seen = set()
    applied = {}        # 'filename' -> [] of chunks
    skipfile, skipall = None, None
    pos, total = 1, sum(len(h.hunks) for h in headers)
    for h in headers:
        pos += len(h.hunks)
        skipfile = None
        fixoffset = 0
        hdr = ''.join(h.header)
        if hdr in seen:
            continue
        seen.add(hdr)
        if skipall is None:
            h.pretty(ui)
        msg = (_('examine changes to %s?') %
               _(' and ').join("'%s'" % f for f in h.files()))
        r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
        if not r:
            continue
        applied[h.filename()] = [h]
        if h.allhunks():
            applied[h.filename()] += h.hunks
            continue
        for i, chunk in enumerate(h.hunks):
            if skipfile is None and skipall is None:
                chunk.pretty(ui)
            if total == 1:
                msg = messages['single'][operation] % chunk.filename()
            else:
                idx = pos - len(h.hunks) + i
                msg = messages['multiple'][operation] % (idx, total,
                                                         chunk.filename())
            r, skipfile, skipall, newpatches = prompt(skipfile,
                skipall, msg, chunk)
            if r:
                if fixoffset:
                    chunk = copy.copy(chunk)
                    chunk.toline += fixoffset
                applied[chunk.filename()].append(chunk)
            elif newpatches is not None:
                for newpatch in newpatches:
                    for newhunk in newpatch.hunks:
                        if fixoffset:
                            newhunk.toline += fixoffset
                        applied[newhunk.filename()].append(newhunk)
            else:
                fixoffset += chunk.removed - chunk.added
    return (sum([h for h in applied.itervalues()
                 if h[0].special() or len(h) > 1], []), {})
class hunk(object):
    def __init__(self, desc, num, lr, context):
        self.number = num
        self.desc = desc
        self.hunk = [desc]
        self.a = []
        self.b = []
        self.starta = self.lena = None
        self.startb = self.lenb = None
        if lr is not None:
            if context:
                self.read_context_hunk(lr)
            else:
                self.read_unified_hunk(lr)

    def getnormalized(self):
        """Return a copy with line endings normalized to LF."""
        def normalize(lines):
            nlines = []
            for line in lines:
                if line.endswith('\r\n'):
                    line = line[:-2] + '\n'
                nlines.append(line)
            return nlines
        # Dummy object, it is rebuilt manually
        nh = hunk(self.desc, self.number, None, None)
        nh.number = self.number
        nh.desc = self.desc
        nh.hunk = self.hunk
        nh.a = normalize(self.a)
        nh.b = normalize(self.b)
        nh.starta = self.starta
        nh.startb = self.startb
        nh.lena = self.lena
        nh.lenb = self.lenb
        return nh

    def read_unified_hunk(self, lr):
        m = unidesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, self.lena, self.startb, self.lenb = m.groups()
        if self.lena is None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb is None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        try:
            diffhelper.addlines(lr, self.hunk, self.lena, self.lenb,
                                self.a, self.b)
        except error.ParseError as e:
            raise PatchError(_("bad hunk #%d: %s") % (self.number, e))
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length. Lets try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1
        self._fixnewline(lr)

    def read_context_hunk(self, lr):
        self.desc = lr.readline()
        m = contextdesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, aend = m.groups()
        self.starta = int(self.starta)
        if aend is None:
            aend = self.starta
        self.lena = int(aend) - self.starta
        if self.starta:
            self.lena += 1
        for x in xrange(self.lena):
            l = lr.readline()
            if l.startswith('---'):
                # lines addition, old block is empty
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('- ') or l.startswith('! '):
                u = '-' + s
            elif l.startswith('  '):
                u = ' ' + s
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.a.append(u)
            self.hunk.append(u)
        l = lr.readline()
        if l.startswith('\ '):
            s = self.a[-1][:-1]
            self.a[-1] = s
            self.hunk[-1] = s
            l = lr.readline()
        m = contextdesc.match(l)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.startb, bend = m.groups()
        self.startb = int(self.startb)
        if bend is None:
            bend = self.startb
        self.lenb = int(bend) - self.startb
        if self.startb:
            self.lenb += 1
        hunki = 1
        for x in xrange(self.lenb):
            l = lr.readline()
            if l.startswith('\ '):
                # XXX: the only way to hit this is with an invalid line range.
                # The no-eol marker is not counted in the line range, but I
                # guess there are diff(1) out there which behave differently.
                s = self.b[-1][:-1]
                self.b[-1] = s
                self.hunk[hunki - 1] = s
                continue
            if not l:
                # line deletions, new block is empty and we hit EOF
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('+ ') or l.startswith('! '):
                u = '+' + s
            elif l.startswith('  '):
                u = ' ' + s
            elif len(self.b) == 0:
                # line deletions, new block is empty
                lr.push(l)
                break
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.b.append(s)
            while True:
                if hunki >= len(self.hunk):
                    h = ""
                else:
                    h = self.hunk[hunki]
                hunki += 1
                if h == u:
                    break
                elif h.startswith('-'):
                    continue
                else:
                    self.hunk.insert(hunki - 1, u)
                    break
        if not self.a:
            # this happens when lines were only added to the hunk
            for x in self.hunk:
                if x.startswith('-') or x.startswith(' '):
                    self.a.append(x)
        if not self.b:
            # this happens when lines were only deleted from the hunk
            for x in self.hunk:
                if x.startswith('+') or x.startswith(' '):
                    self.b.append(x[1:])
        # @@ -start,len +start,len @@
        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
                                               self.startb, self.lenb)
        self.hunk[0] = self.desc
        self._fixnewline(lr)

    def _fixnewline(self, lr):
        l = lr.readline()
        if l.startswith('\ '):
            diffhelper.fixnewline(self.hunk, self.a, self.b)
        else:
            lr.push(l)

    def complete(self):
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def _fuzzit(self, old, new, fuzz, toponly):
        # this removes context lines from the top and bottom of list 'l'. It
        # checks the hunk to make sure only context lines are removed, and then
        # returns a new shortened list of lines.
        fuzz = min(fuzz, len(old))
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            for x in xrange(hlen - 1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x + 1].startswith(' '):
                    top += 1
                else:
                    break
            if not toponly:
                for x in xrange(hlen - 1):
                    if self.hunk[hlen - bot - 1].startswith(' '):
                        bot += 1
                    else:
                        break
            bot = min(fuzz, bot)
            top = min(fuzz, top)
            return old[top:len(old) - bot], new[top:len(new) - bot], top
        return old, new, 0

    def fuzzit(self, fuzz, toponly):
        old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
        oldstart = self.starta + top
        newstart = self.startb + top
        # zero length hunk ranges already have their start decremented
        if self.lena and oldstart > 0:
            oldstart -= 1
        if self.lenb and newstart > 0:
            newstart -= 1
        return old, oldstart, new, newstart

class binhunk(object):
    'A binary patch file.'
    def __init__(self, lr, fname):
        self.text = None
        self.delta = False
        self.hunk = ['GIT binary patch\n']
        self._fname = fname
        self._read(lr)

    def complete(self):
        return self.text is not None

    def new(self, lines):
        if self.delta:
            return [applybindelta(self.text, ''.join(lines))]
        return [self.text]

    def _read(self, lr):
        def getline(lr, hunk):
            l = lr.readline()
            hunk.append(l)
            return l.rstrip('\r\n')
        size = 0
        while True:
            line = getline(lr, self.hunk)
            if not line:
                raise PatchError(_('could not extract "%s" binary data')
                                 % self._fname)
            if line.startswith('literal '):
                size = int(line[8:].rstrip())
                break
            if line.startswith('delta '):
                size = int(line[6:].rstrip())
                self.delta = True
                break
        dec = []
        line = getline(lr, self.hunk)
        while len(line) > 1:
            l = line[0:1]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            try:
                dec.append(util.b85decode(line[1:])[:l])
            except ValueError as e:
                raise PatchError(_('could not decode "%s" binary patch: %s')
                                 % (self._fname, stringutil.forcebytestr(e)))
            line = getline(lr, self.hunk)
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            raise PatchError(_('"%s" length is %d bytes, should be %d')
                             % (self._fname, len(text), size))
        self.text = text

def parsefilename(str):
    # --- filename \t|space stuff
    s = str[4:].rstrip('\r\n')
    i = s.find('\t')
    if i < 0:
        i = s.find(' ')
        if i < 0:
            return s
    return s[:i]

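# A minimal sketch of parsefilename()'s behaviour (hypothetical inputs, not
# from the upstream test suite): the '--- '/'+++ ' prefix is dropped, and
# anything after a tab (or, failing that, a space) is treated as timestamp
# noise, e.g.
#
#   parsefilename('--- a/folder1/g\t2018-01-01 00:00:00')  # -> 'a/folder1/g'
#   parsefilename('+++ b/folder1/g')                       # -> 'b/folder1/g'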
def reversehunks(hunks):
    '''reverse the signs in the hunks given as argument
    This function operates on hunks coming out of patch.filterpatch, that is
    a list of the form: [header1, hunk1, hunk2, header2...]. Example usage:

    >>> rawpatch = b"""diff --git a/folder1/g b/folder1/g
    ... --- a/folder1/g
    ... +++ b/folder1/g
    ... @@ -1,7 +1,7 @@
    ... +firstline
    ...  c
    ...  1
    ...  2
    ... + 3
    ... -4
    ...  5
    ...  d
    ... +lastline"""
    >>> hunks = parsepatch([rawpatch])
    >>> hunkscomingfromfilterpatch = []
    >>> for h in hunks:
    ...     hunkscomingfromfilterpatch.append(h)
    ...     hunkscomingfromfilterpatch.extend(h.hunks)
    >>> reversedhunks = reversehunks(hunkscomingfromfilterpatch)
    >>> from . import util
    >>> fp = util.stringio()
    >>> for c in reversedhunks:
    ...     c.write(fp)
    >>> fp.seek(0) or None
    >>> reversedpatch = fp.read()
    >>> print(pycompat.sysstr(reversedpatch))
    diff --git a/folder1/g b/folder1/g
    --- a/folder1/g
    +++ b/folder1/g
    @@ -1,4 +1,3 @@
    -firstline
     c
     1
     2
    @@ -2,6 +1,6 @@
     c
     1
     2
    - 3
    +4
     5
     d
    @@ -6,3 +5,2 @@
     5
     d
    -lastline
    '''
    newhunks = []
    for c in hunks:
        if util.safehasattr(c, 'reversehunk'):
            c = c.reversehunk()
        newhunks.append(c)
    return newhunks

def parsepatch(originalchunks, maxcontext=None):
    """patch -> [] of headers -> [] of hunks
    If maxcontext is not None, trim context lines if necessary.

    >>> rawpatch = b'''diff --git a/folder1/g b/folder1/g
    ... --- a/folder1/g
    ... +++ b/folder1/g
    ... @@ -1,8 +1,10 @@
    ...  1
    ...  2
    ... -3
    ...  4
    ...  5
    ...  6
    ... +6.1
    ... +6.2
    ...  7
    ...  8
    ... +9'''
    >>> out = util.stringio()
    >>> headers = parsepatch([rawpatch], maxcontext=1)
    >>> for header in headers:
    ...     header.write(out)
    ...     for hunk in header.hunks:
    ...         hunk.write(out)
    >>> print(pycompat.sysstr(out.getvalue()))
    diff --git a/folder1/g b/folder1/g
    --- a/folder1/g
    +++ b/folder1/g
    @@ -2,3 +2,2 @@
     2
    -3
     4
    @@ -6,2 +5,4 @@
     6
    +6.1
    +6.2
     7
    @@ -8,1 +9,2 @@
     8
    +9
    """
    class parser(object):
        """patch parsing state machine"""
        def __init__(self):
            self.fromline = 0
            self.toline = 0
            self.proc = ''
            self.header = None
            self.context = []
            self.before = []
            self.hunk = []
            self.headers = []
        def addrange(self, limits):
            fromstart, fromend, tostart, toend, proc = limits
            self.fromline = int(fromstart)
            self.toline = int(tostart)
            self.proc = proc
        def addcontext(self, context):
            if self.hunk:
                h = recordhunk(self.header, self.fromline, self.toline,
                               self.proc, self.before, self.hunk, context,
                               maxcontext)
                self.header.hunks.append(h)
                self.fromline += len(self.before) + h.removed
                self.toline += len(self.before) + h.added
                self.before = []
                self.hunk = []
            self.context = context
        def addhunk(self, hunk):
            if self.context:
                self.before = self.context
                self.context = []
            self.hunk = hunk
        def newfile(self, hdr):
            self.addcontext([])
            h = header(hdr)
            self.headers.append(h)
            self.header = h
        def addother(self, line):
            pass # 'other' lines are ignored
        def finished(self):
            self.addcontext([])
            return self.headers
        transitions = {
            'file': {'context': addcontext,
                     'file': newfile,
                     'hunk': addhunk,
                     'range': addrange},
            'context': {'file': newfile,
                        'hunk': addhunk,
                        'range': addrange,
                        'other': addother},
            'hunk': {'context': addcontext,
                     'file': newfile,
                     'range': addrange},
            'range': {'context': addcontext,
                      'hunk': addhunk},
            'other': {'other': addother},
        }
    p = parser()
    fp = stringio()
    fp.write(''.join(originalchunks))
    fp.seek(0)

    state = 'context'
    for newstate, data in scanpatch(fp):
        try:
            p.transitions[state][newstate](p, data)
        except KeyError:
            raise PatchError('unhandled transition: %s -> %s' %
                             (state, newstate))
        state = newstate
    del fp
    return p.finished()

def pathtransform(path, strip, prefix):
    '''turn a path from a patch into a path suitable for the repository
    prefix, if not empty, is expected to be normalized with a / at the end.
    Returns (stripped components, path in repository).

    >>> pathtransform(b'a/b/c', 0, b'')
    ('', 'a/b/c')
    >>> pathtransform(b'   a/b/c   ', 0, b'')
    ('', '   a/b/c')
    >>> pathtransform(b'   a/b/c   ', 2, b'')
    ('a/b/', 'c')
    >>> pathtransform(b'a/b/c', 0, b'd/e/')
    ('', 'd/e/a/b/c')
    >>> pathtransform(b'   a//b/c   ', 2, b'd/e/')
    ('a//b/', 'd/e/c')
    >>> pathtransform(b'a/b/c', 3, b'')
    Traceback (most recent call last):
    PatchError: unable to strip away 1 of 3 dirs from a/b/c
    '''
    pathlen = len(path)
    i = 0
    if strip == 0:
        return '', prefix + path.rstrip()
    count = strip
    while count > 0:
        i = path.find('/', i)
        if i == -1:
            raise PatchError(_("unable to strip away %d of %d dirs from %s") %
                             (count, strip, path))
        i += 1
        # consume '//' in the path
        while i < pathlen - 1 and path[i:i + 1] == '/':
            i += 1
        count -= 1
    return path[:i].lstrip(), prefix + path[i:].rstrip()

def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix):
    nulla = afile_orig == "/dev/null"
    nullb = bfile_orig == "/dev/null"
    create = nulla and hunk.starta == 0 and hunk.lena == 0
    remove = nullb and hunk.startb == 0 and hunk.lenb == 0
    abase, afile = pathtransform(afile_orig, strip, prefix)
    gooda = not nulla and backend.exists(afile)
    bbase, bfile = pathtransform(bfile_orig, strip, prefix)
    if afile == bfile:
        goodb = gooda
    else:
        goodb = not nullb and backend.exists(bfile)
    missing = not goodb and not gooda and not create

    # some diff programs apparently produce patches where the afile is
    # not /dev/null, but afile starts with bfile
    abasedir = afile[:afile.rfind('/') + 1]
    bbasedir = bfile[:bfile.rfind('/') + 1]
    if (missing and abasedir == bbasedir and afile.startswith(bfile)
        and hunk.starta == 0 and hunk.lena == 0):
        create = True
        missing = False

    # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
    # diff is between a file and its backup. In this case, the original
    # file should be patched (see original mpatch code).
    isbackup = (abase == bbase and bfile.startswith(afile))
    fname = None
    if not missing:
        if gooda and goodb:
            if isbackup:
                fname = afile
            else:
                fname = bfile
        elif gooda:
            fname = afile

    if not fname:
        if not nullb:
            if isbackup:
                fname = afile
            else:
                fname = bfile
        elif not nulla:
            fname = afile
        else:
            raise PatchError(_("undefined source and destination files"))

    gp = patchmeta(fname)
    if create:
        gp.op = 'ADD'
    elif remove:
        gp.op = 'DELETE'
    return gp

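# Worked example (illustrative, with a hypothetical file name): for a plain
# file-creation diff
#
#   --- /dev/null
#   +++ b/newfile
#   @@ -0,0 +1,2 @@
#
# afile_orig is "/dev/null" and the old range is 0,0, so 'create' is True and
# the returned patchmeta has op 'ADD'; the mirror case (+++ /dev/null with a
# new range of 0,0) produces op 'DELETE'.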
def scanpatch(fp):
    """like patch.iterhunks, but yield different events
    - ('file',    [header_lines + fromfile + tofile])
    - ('context', [context_lines])
    - ('hunk',    [hunk_lines])
    - ('range',   (-start,len, +start,len, proc))
    """
    lines_re = re.compile(br'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
    lr = linereader(fp)

    def scanwhile(first, p):
        """scan lr while predicate holds"""
        lines = [first]
        for line in iter(lr.readline, ''):
            if p(line):
                lines.append(line)
            else:
                lr.push(line)
                break
        return lines

    for line in iter(lr.readline, ''):
        if line.startswith('diff --git a/') or line.startswith('diff -r '):
            def notheader(line):
                s = line.split(None, 1)
                return not s or s[0] not in ('---', 'diff')
            header = scanwhile(line, notheader)
            fromfile = lr.readline()
            if fromfile.startswith('---'):
                tofile = lr.readline()
                header += [fromfile, tofile]
            else:
                lr.push(fromfile)
            yield 'file', header
        elif line.startswith(' '):
            cs = (' ', '\\')
            yield 'context', scanwhile(line, lambda l: l.startswith(cs))
        elif line.startswith(('-', '+')):
            cs = ('-', '+', '\\')
            yield 'hunk', scanwhile(line, lambda l: l.startswith(cs))
        else:
            m = lines_re.match(line)
            if m:
                yield 'range', m.groups()
            else:
                yield 'other', line

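# Illustrative sketch (ad-hoc example, not from the upstream tests): feeding a
# small unified diff through scanpatch() produces the event stream that
# parsepatch()'s state machine consumes:
#
#   fp = stringio(b'diff --git a/x b/x\n--- a/x\n+++ b/x\n'
#                 b'@@ -1,2 +1,2 @@\n keep\n-old\n+new\n')
#   [event for event, data in scanpatch(fp)]
#   # -> ['file', 'range', 'context', 'hunk']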
def scangitpatch(lr, firstline):
    """
    Git patches can emit:
    - rename a to b
    - change b
    - copy a to c
    - change c

    We cannot apply this sequence as-is, the renamed 'a' could not be
    found for it would have been renamed already. And we cannot copy
    from 'b' instead because 'b' would have been changed already. So
    we scan the git patch for copy and rename commands so we can
    perform the copies ahead of time.
    """
    pos = 0
    try:
        pos = lr.fp.tell()
        fp = lr.fp
    except IOError:
        fp = stringio(lr.fp.read())
    gitlr = linereader(fp)
    gitlr.push(firstline)
    gitpatches = readgitpatch(gitlr)
    fp.seek(pos)
    return gitpatches

def iterhunks(fp):
    """Read a patch and yield the following events:
    - ("file", afile, bfile, firsthunk): select a new target file.
    - ("hunk", hunk): a new hunk is ready to be applied, follows a
      "file" event.
    - ("git", gitchanges): current diff is in git format, gitchanges
      maps filenames to gitpatch records. Unique event.
    """
    afile = ""
    bfile = ""
    state = None
    hunknum = 0
    emitfile = newfile = False
    gitpatches = None

    # our states
    BFILE = 1
    context = None
    lr = linereader(fp)

    for x in iter(lr.readline, ''):
        if state == BFILE and (
            (not context and x.startswith('@'))
            or (context is not False and x.startswith('***************'))
            or x.startswith('GIT binary patch')):
            gp = None
            if (gitpatches and
                gitpatches[-1].ispatching(afile, bfile)):
                gp = gitpatches.pop()
            if x.startswith('GIT binary patch'):
                h = binhunk(lr, gp.path)
            else:
                if context is None and x.startswith('***************'):
                    context = True
                h = hunk(x, hunknum + 1, lr, context)
            hunknum += 1
            if emitfile:
                emitfile = False
                yield 'file', (afile, bfile, h, gp and gp.copy() or None)
            yield 'hunk', h
        elif x.startswith('diff --git a/'):
            m = gitre.match(x.rstrip(' \r\n'))
            if not m:
                continue
            if gitpatches is None:
                # scan whole input for git metadata
                gitpatches = scangitpatch(lr, x)
                yield 'git', [g.copy() for g in gitpatches
                              if g.op in ('COPY', 'RENAME')]
                gitpatches.reverse()
            afile = 'a/' + m.group(1)
            bfile = 'b/' + m.group(2)
            while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
                gp = gitpatches.pop()
                yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
            if not gitpatches:
                raise PatchError(_('failed to synchronize metadata for "%s"')
                                 % afile[2:])
            gp = gitpatches[-1]
            newfile = True
        elif x.startswith('---'):
            # check for a unified diff
            l2 = lr.readline()
            if not l2.startswith('+++'):
                lr.push(l2)
                continue
            newfile = True
            context = False
            afile = parsefilename(x)
            bfile = parsefilename(l2)
        elif x.startswith('***'):
            # check for a context diff
            l2 = lr.readline()
            if not l2.startswith('---'):
                lr.push(l2)
                continue
            l3 = lr.readline()
            lr.push(l3)
            if not l3.startswith("***************"):
                lr.push(l2)
                continue
            newfile = True
            context = True
            afile = parsefilename(x)
            bfile = parsefilename(l2)

        if newfile:
            newfile = False
            emitfile = True
            state = BFILE
            hunknum = 0

    while gitpatches:
        gp = gitpatches.pop()
        yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())

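# Illustrative sketch (ad-hoc example): for a plain unified diff the generator
# emits a 'file' event for each target followed by its 'hunk' events; 'git'
# only appears when the input is a git-format patch:
#
#   fp = stringio(b'--- a/x\n+++ b/x\n@@ -1,1 +1,1 @@\n-old\n+new\n')
#   [event for event, data in iterhunks(fp)]
#   # -> ['file', 'hunk']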
def applybindelta(binchunk, data):
    """Apply a binary delta hunk
    The algorithm used is the algorithm from git's patch-delta.c
    """
    def deltahead(binchunk):
        i = 0
        for c in pycompat.bytestr(binchunk):
            i += 1
            if not (ord(c) & 0x80):
                return i
        return i
    out = ""
    s = deltahead(binchunk)
    binchunk = binchunk[s:]
    s = deltahead(binchunk)
    binchunk = binchunk[s:]
    i = 0
    while i < len(binchunk):
        cmd = ord(binchunk[i:i + 1])
        i += 1
        if (cmd & 0x80):
            offset = 0
            size = 0
            if (cmd & 0x01):
                offset = ord(binchunk[i:i + 1])
                i += 1
            if (cmd & 0x02):
                offset |= ord(binchunk[i:i + 1]) << 8
                i += 1
            if (cmd & 0x04):
                offset |= ord(binchunk[i:i + 1]) << 16
                i += 1
            if (cmd & 0x08):
                offset |= ord(binchunk[i:i + 1]) << 24
                i += 1
            if (cmd & 0x10):
                size = ord(binchunk[i:i + 1])
                i += 1
            if (cmd & 0x20):
                size |= ord(binchunk[i:i + 1]) << 8
                i += 1
            if (cmd & 0x40):
                size |= ord(binchunk[i:i + 1]) << 16
                i += 1
            if size == 0:
                size = 0x10000
            offset_end = offset + size
            out += data[offset:offset_end]
        elif cmd != 0:
            offset_end = i + cmd
            out += binchunk[i:offset_end]
            i += cmd
        else:
            raise PatchError(_('unexpected delta opcode 0'))
    return out

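# Worked example (illustrative, hand-decoded): a delta starts with two varint
# headers (source and target sizes), then opcodes. An opcode with the high bit
# set copies a slice of 'data' (bits 0x01..0x08 select offset bytes, bits
# 0x10..0x40 select size bytes); an opcode in 1..0x7f inserts that many
# literal bytes taken from the delta itself. With data = 'abcdef', the delta
# '\x06\x03\x91\x01\x03' decodes as: source size 6, target size 3, then
# opcode 0x91 = copy with one offset byte (1) and one size byte (3),
# i.e. data[1:4] -> 'bcd'.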
def applydiff(ui, fp, backend, store, strip=1, prefix='', eolmode='strict'):
    """Reads a patch from fp and tries to apply it.

    Returns 0 for a clean patch, -1 if any rejects were found and 1 if
    there was any fuzz.

    If 'eolmode' is 'strict', the patch content and patched file are
    read in binary mode. Otherwise, line endings are ignored when
    patching then normalized according to 'eolmode'.
    """
    return _applydiff(ui, fp, patchfile, backend, store, strip=strip,
                      prefix=prefix, eolmode=eolmode)

def _canonprefix(repo, prefix):
    if prefix:
        prefix = pathutil.canonpath(repo.root, repo.getcwd(), prefix)
        if prefix != '':
            prefix += '/'
    return prefix

def _applydiff(ui, fp, patcher, backend, store, strip=1, prefix='',
               eolmode='strict'):
    prefix = _canonprefix(backend.repo, prefix)
    def pstrip(p):
        return pathtransform(p, strip - 1, prefix)[1]

    rejects = 0
    err = 0
    current_file = None
    for state, values in iterhunks(fp):
        if state == 'hunk':
            if not current_file:
                continue
            ret = current_file.apply(values)
            if ret > 0:
                err = 1
        elif state == 'file':
            if current_file:
                rejects += current_file.close()
            current_file = None
            afile, bfile, first_hunk, gp = values
            if gp:
                gp.path = pstrip(gp.path)
                if gp.oldpath:
                    gp.oldpath = pstrip(gp.oldpath)
            else:
                gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
                                   prefix)
            if gp.op == 'RENAME':
                backend.unlink(gp.oldpath)
            if not first_hunk:
                if gp.op == 'DELETE':
                    backend.unlink(gp.path)
                    continue
                data, mode = None, None
                if gp.op in ('RENAME', 'COPY'):
                    data, mode = store.getfile(gp.oldpath)[:2]
                    if data is None:
                        # This means that the old path does not exist
                        raise PatchError(_("source file '%s' does not exist")
                                         % gp.oldpath)
                if gp.mode:
                    mode = gp.mode
                if gp.op == 'ADD':
                    # Added files without content have no hunk and
                    # must be created
                    data = ''
                if data or mode:
                    if (gp.op in ('ADD', 'RENAME', 'COPY')
                        and backend.exists(gp.path)):
                        raise PatchError(_("cannot create %s: destination "
                                           "already exists") % gp.path)
                    backend.setfile(gp.path, data, mode, gp.oldpath)
                continue
            try:
                current_file = patcher(ui, gp, backend, store,
                                       eolmode=eolmode)
            except PatchError as inst:
                ui.warn(str(inst) + '\n')
                current_file = None
                rejects += 1
                continue
        elif state == 'git':
            for gp in values:
                path = pstrip(gp.oldpath)
                data, mode = backend.getfile(path)
                if data is None:
                    # The error ignored here will trigger a getfile()
                    # error in a place more appropriate for error
                    # handling, and will not interrupt the patching
                    # process.
                    pass
                else:
                    store.setfile(path, data, mode)
        else:
            raise error.Abort(_('unsupported parser state: %s') % state)

    if current_file:
        rejects += current_file.close()

    if rejects:
        return -1
    return err

def _externalpatch(ui, repo, patcher, patchname, strip, files,
                   similarity):
    """use <patcher> to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor."""
    fuzz = False
    args = []
    cwd = repo.root
    if cwd:
        args.append('-d %s' % procutil.shellquote(cwd))
    cmd = ('%s %s -p%d < %s'
           % (patcher, ' '.join(args), strip, procutil.shellquote(patchname)))
    ui.debug('Using external patch tool: %s\n' % cmd)
    fp = procutil.popen(cmd, 'rb')
    try:
        for line in util.iterfile(fp):
            line = line.rstrip()
            ui.note(line + '\n')
            if line.startswith('patching file '):
                pf = util.parsepatchoutput(line)
                printed_file = False
                files.add(pf)
            elif line.find('with fuzz') >= 0:
                fuzz = True
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
            elif line.find('saving rejects to file') >= 0:
                ui.warn(line + '\n')
            elif line.find('FAILED') >= 0:
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
    finally:
        if files:
            scmutil.marktouched(repo, files, similarity)
    code = fp.close()
    if code:
        raise PatchError(_("patch command failed: %s") %
                         procutil.explainexit(code))
    return fuzz

def patchbackend(ui, backend, patchobj, strip, prefix, files=None,
                 eolmode='strict'):
    if files is None:
        files = set()
    if eolmode is None:
        eolmode = ui.config('patch', 'eol')
    if eolmode.lower() not in eolmodes:
        raise error.Abort(_('unsupported line endings type: %s') % eolmode)
    eolmode = eolmode.lower()

    store = filestore()
    try:
        fp = open(patchobj, 'rb')
    except TypeError:
        fp = patchobj
    try:
        ret = applydiff(ui, fp, backend, store, strip=strip, prefix=prefix,
                        eolmode=eolmode)
    finally:
        if fp != patchobj:
            fp.close()
        files.update(backend.close())
        store.close()
    if ret < 0:
        raise PatchError(_('patch failed to apply'))
    return ret > 0

def internalpatch(ui, repo, patchobj, strip, prefix='', files=None,
                  eolmode='strict', similarity=0):
    """use builtin patch to apply <patchobj> to the working directory.
    returns whether patch was applied with fuzz factor."""
    backend = workingbackend(ui, repo, similarity)
    return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)

def patchrepo(ui, repo, ctx, store, patchobj, strip, prefix, files=None,
              eolmode='strict'):
    backend = repobackend(ui, repo, ctx, store)
    return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)

def patch(ui, repo, patchname, strip=1, prefix='', files=None, eolmode='strict',
          similarity=0):
    """Apply <patchname> to the working directory.

    'eolmode' specifies how end of lines should be handled. It can be:
    - 'strict': inputs are read in binary mode, EOLs are preserved
    - 'crlf': EOLs are ignored when patching and reset to CRLF
    - 'lf': EOLs are ignored when patching and reset to LF
    - None: get it from user settings, default to 'strict'
    'eolmode' is ignored when using an external patcher program.

    Returns whether patch was applied with fuzz factor.
    """
    patcher = ui.config('ui', 'patch')
    if files is None:
        files = set()
    if patcher:
        return _externalpatch(ui, repo, patcher, patchname, strip,
                              files, similarity)
    return internalpatch(ui, repo, patchname, strip, prefix, files, eolmode,
                         similarity)

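# Typical call, sketched (assumes an existing ui and repo, and a hypothetical
# 'fix.diff' in the current directory):
#
#   fuzz = patch(ui, repo, 'fix.diff', strip=1, eolmode='strict')
#
# Setting the 'ui.patch' config option makes this dispatch to the external
# patch program via _externalpatch() instead of the builtin patcher.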
Martin von Zweigbergk
patch: accept prefix argument to changedfiles() helper...
r35053 def changedfiles(ui, repo, patchpath, strip=1, prefix=''):
Patrick Mezard
patch: add lexists() to backends, use it in selectfile()...
r14351 backend = fsbackend(ui, repo.root)
Martin von Zweigbergk
patch: accept prefix argument to changedfiles() helper...
r35053 prefix = _canonprefix(repo, prefix)
Bryan O'Sullivan
with: use context manager for I/O in changedfiles in patch
r27796 with open(patchpath, 'rb') as fp:
Idan Kamara
patch: introduce changedfiles...
r14255 changed = set()
for state, values in iterhunks(fp):
Patrick Mezard
patch: stop modifying gitpatch objects...
r14389 if state == 'file':
Patrick Mezard
patch: stop handling hunkless git blocks out of stream...
r14388 afile, bfile, first_hunk, gp = values
if gp:
Martin von Zweigbergk
patch: accept prefix argument to changedfiles() helper...
r35053 gp.path = pathtransform(gp.path, strip - 1, prefix)[1]
Patrick Mezard
patch: generalize the use of patchmeta in applydiff()...
r14566 if gp.oldpath:
Martin von Zweigbergk
patch: accept prefix argument to changedfiles() helper...
r35053 gp.oldpath = pathtransform(gp.oldpath, strip - 1,
prefix)[1]
Patrick Mezard
patch: generalize the use of patchmeta in applydiff()...
r14566 else:
Siddharth Agarwal
patch.makepatchmeta: accept a prefix parameter...
r24245 gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
Martin von Zweigbergk
patch: accept prefix argument to changedfiles() helper...
r35053 prefix)
Patrick Mezard
patch: generalize the use of patchmeta in applydiff()...
r14566 changed.add(gp.path)
if gp.op == 'RENAME':
changed.add(gp.oldpath)
Patrick Mezard
patch: stop modifying gitpatch objects...
r14389 elif state not in ('hunk', 'git'):
Pierre-Yves David
error: get Abort from 'error' instead of 'util'...
r26587 raise error.Abort(_('unsupported parser state: %s') % state)
Idan Kamara
patch: introduce changedfiles...
r14255 return changed
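
# Illustrative sketch (hypothetical path and values): changedfiles() reports
# which repository paths a patch file would touch, without applying it:
#
#   touched = changedfiles(ui, repo, '/tmp/fix.patch', strip=1)
#   # -> a set of paths, including the old name of any renamed file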

class GitDiffRequired(Exception):
    pass

diffopts = diffutil.diffallopts
diffallopts = diffutil.diffallopts
difffeatureopts = diffutil.difffeatureopts

def diff(repo, node1=None, node2=None, match=None, changes=None,
         opts=None, losedatafn=None, prefix='', relroot='', copy=None,
         hunksfilterfn=None):
    '''yields diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.

    losedatafn(**kwarg) is a callable run when opts.upgrade=True and
    every time some change cannot be represented with the current
    patch format. Return False to upgrade to git patch format, True to
    accept the loss or raise an exception to abort the diff. It is
    called with the name of current file being diffed as 'fn'. If set
    to None, patches will always be upgraded to git format when
    necessary.

    prefix is a filename prefix that is prepended to all filenames on
    display (used for subrepos).

    relroot, if not empty, must be normalized with a trailing /. Any match
    patterns that fall outside it will be ignored.

    copy, if not empty, should contain mappings {dst@y: src@x} of copy
    information.

    hunksfilterfn, if not None, should be a function taking a filectx and
    hunks generator that may yield filtered hunks.
    '''
    for fctx1, fctx2, hdr, hunks in diffhunks(
            repo, node1=node1, node2=node2,
            match=match, changes=changes, opts=opts,
            losedatafn=losedatafn, prefix=prefix, relroot=relroot, copy=copy,
    ):
        if hunksfilterfn is not None:
            # If the file has been removed, fctx2 is None; but this should
            # not occur here since we catch removed files early in
            # logcmdutil.getlinerangerevs() for 'hg log -L'.
            assert fctx2 is not None, \
                'fctx2 unexpectedly None in diff hunks filtering'
            hunks = hunksfilterfn(fctx2, hunks)
        text = ''.join(sum((list(hlines) for hrange, hlines in hunks), []))
        if hdr and (text or len(hdr) > 1):
            yield '\n'.join(hdr) + '\n'
        if text:
            yield text
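
# Illustrative sketch (hypothetical caller): diff() yields byte chunks that can
# be written out directly; node1/node2 stand in for any two changesets:
#
#   for chunk in diff(repo, node1, node2, opts=diffallopts(ui)):
#       ui.write(chunk)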

def diffhunks(repo, node1=None, node2=None, match=None, changes=None,
              opts=None, losedatafn=None, prefix='', relroot='', copy=None):
    """Yield diff of changes to files in the form of (`header`, `hunks`) tuples
    where `header` is a list of diff headers and `hunks` is an iterable of
    (`hunkrange`, `hunklines`) tuples.

    See diff() for the meaning of parameters.
    """

    if opts is None:
        opts = mdiff.defaultopts

    if not node1 and not node2:
        node1 = repo.dirstate.p1()

    def lrugetfilectx():
        cache = {}
        order = collections.deque()
        def getfilectx(f, ctx):
            fctx = ctx.filectx(f, filelog=cache.get(f))
            if f not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[f] = fctx.filelog()
            else:
                order.remove(f)
            order.append(f)
            return fctx
        return getfilectx
    getfilectx = lrugetfilectx()

    ctx1 = repo[node1]
    ctx2 = repo[node2]

    relfiltered = False
    if relroot != '' and match.always():
        # as a special case, create a new matcher with just the relroot
        pats = [relroot]
        match = scmutil.match(ctx2, pats, default='path')
        relfiltered = True

    if not changes:
        changes = repo.status(ctx1, ctx2, match=match)
    modified, added, removed = changes[:3]

    if not modified and not added and not removed:
        return []

    if repo.ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]

    if copy is None:
        copy = {}
        if opts.git or opts.upgrade:
            copy = copies.pathcopies(ctx1, ctx2, match=match)

    if relroot is not None:
        if not relfiltered:
            # XXX this would ideally be done in the matcher, but that is
            # generally meant to 'or' patterns, not 'and' them. In this case we
            # need to 'and' all the patterns from the matcher with relroot.
            def filterrel(l):
                return [f for f in l if f.startswith(relroot)]
            modified = filterrel(modified)
            added = filterrel(added)
            removed = filterrel(removed)
            relfiltered = True
        # filter out copies where either side isn't inside the relative root
        copy = dict(((dst, src) for (dst, src) in copy.iteritems()
                     if dst.startswith(relroot)
                     and src.startswith(relroot)))

    modifiedset = set(modified)
    addedset = set(added)
    removedset = set(removed)
    for f in modified:
        if f not in ctx1:
            # Fix up added, since merged-in additions appear as
            # modifications during merges
            modifiedset.remove(f)
            addedset.add(f)
    for f in removed:
        if f not in ctx1:
            # Merged-in additions that are then removed are reported as
            # removed. They are not in ctx1, so we don't want to show them
            # in the diff.
            removedset.remove(f)
    modified = sorted(modifiedset)
    added = sorted(addedset)
    removed = sorted(removedset)
    for dst, src in list(copy.items()):
        if src not in ctx1:
            # Files merged in during a merge and then copied/renamed are
            # reported as copies. We want to show them in the diff as
            # additions.
            del copy[dst]

    prefetchmatch = scmutil.matchfiles(
        repo, list(modifiedset | addedset | removedset))
    scmutil.prefetchfiles(repo, [ctx1.rev(), ctx2.rev()], prefetchmatch)

    def difffn(opts, losedata):
        return trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
                       copy, getfilectx, opts, losedata, prefix, relroot)
    if opts.upgrade and not opts.git:
        try:
            def losedata(fn):
                if not losedatafn or not losedatafn(fn=fn):
                    raise GitDiffRequired
            # Buffer the whole output until we are sure it can be generated
            return list(difffn(opts.copy(git=False), losedata))
        except GitDiffRequired:
            return difffn(opts.copy(git=True), None)
    else:
        return difffn(opts, None)
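
# Illustrative sketch (hypothetical caller): diffhunks() is the structured form
# of diff(); each item carries the file contexts, the header lines and the
# (hunkrange, hunklines) pairs produced by mdiff.unidiff() (hunkrange is None
# for git binary patches):
#
#   for fctx1, fctx2, header, hunks in diffhunks(repo, node1, node2):
#       for hunkrange, hunklines in hunks:
#           pass  # e.g. filter or count hunk lines here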

def diffsinglehunk(hunklines):
    """yield tokens for a list of lines in a single hunk"""
    for line in hunklines:
        # chomp
        chompline = line.rstrip('\n')
        # highlight tabs and trailing whitespace
        stripline = chompline.rstrip()
        if line.startswith('-'):
            label = 'diff.deleted'
        elif line.startswith('+'):
            label = 'diff.inserted'
        else:
            raise error.ProgrammingError('unexpected hunk line: %s' % line)
        for token in tabsplitter.findall(stripline):
            if token.startswith('\t'):
                yield (token, 'diff.tab')
            else:
                yield (token, label)
        if chompline != stripline:
            yield (chompline[len(stripline):], 'diff.trailingwhitespace')
        if chompline != line:
            yield (line[len(chompline):], '')
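
# Illustrative sketch: for the deleted line '-foo\tbar  \n', diffsinglehunk()
# yields roughly
#
#   ('-foo', 'diff.deleted'), ('\t', 'diff.tab'), ('bar', 'diff.deleted'),
#   ('  ', 'diff.trailingwhitespace'), ('\n', '')
#
# i.e. tabs and trailing whitespace receive their own labels.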

def diffsinglehunkinline(hunklines):
    """yield tokens for a list of lines in a single hunk, with inline colors"""
    # prepare deleted, and inserted content
    a = ''
    b = ''
    for line in hunklines:
        if line[0] == '-':
            a += line[1:]
        elif line[0] == '+':
            b += line[1:]
        else:
            raise error.ProgrammingError('unexpected hunk line: %s' % line)
    # fast path: if either side is empty, use diffsinglehunk
    if not a or not b:
        for t in diffsinglehunk(hunklines):
            yield t
        return
    # re-split the content into words
    al = wordsplitter.findall(a)
    bl = wordsplitter.findall(b)
    # re-arrange the words to lines since the diff algorithm is line-based
    aln = [s if s == '\n' else s + '\n' for s in al]
    bln = [s if s == '\n' else s + '\n' for s in bl]
    an = ''.join(aln)
    bn = ''.join(bln)
    # run the diff algorithm, prepare atokens and btokens
    atokens = []
    btokens = []
    blocks = mdiff.allblocks(an, bn, lines1=aln, lines2=bln)
    for (a1, a2, b1, b2), btype in blocks:
        changed = btype == '!'
        for token in mdiff.splitnewlines(''.join(al[a1:a2])):
            atokens.append((changed, token))
        for token in mdiff.splitnewlines(''.join(bl[b1:b2])):
            btokens.append((changed, token))

    # yield deleted tokens, then inserted ones
    for prefix, label, tokens in [('-', 'diff.deleted', atokens),
                                  ('+', 'diff.inserted', btokens)]:
        nextisnewline = True
        for changed, token in tokens:
            if nextisnewline:
                yield (prefix, label)
                nextisnewline = False
            # special handling line end
            isendofline = token.endswith('\n')
            if isendofline:
                chomp = token[:-1] # chomp
                token = chomp.rstrip() # detect spaces at the end
                endspaces = chomp[len(token):]
            # scan tabs
            for maybetab in tabsplitter.findall(token):
                if '\t' == maybetab[0]:
                    currentlabel = 'diff.tab'
                else:
                    if changed:
                        currentlabel = label + '.changed'
                    else:
                        currentlabel = label + '.unchanged'
                yield (maybetab, currentlabel)
            if isendofline:
                if endspaces:
                    yield (endspaces, 'diff.trailingwhitespace')
                yield ('\n', '')
                nextisnewline = True
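
# Illustrative sketch: with worddiff enabled, changing 'foo bar' to 'foo baz'
# labels only the differing word, yielding roughly
#
#   ('-', 'diff.deleted'), ('foo ', 'diff.deleted.unchanged'),
#   ('bar', 'diff.deleted.changed'), ('\n', '')
#
# followed by the corresponding '+' tokens labelled 'diff.inserted.*'.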

def difflabel(func, *args, **kw):
    '''yields 2-tuples of (output, label) based on the output of func()'''
    if kw.get(r'opts') and kw[r'opts'].worddiff:
        dodiffhunk = diffsinglehunkinline
    else:
        dodiffhunk = diffsinglehunk
    headprefixes = [('diff', 'diff.diffline'),
                    ('copy', 'diff.extended'),
                    ('rename', 'diff.extended'),
                    ('old', 'diff.extended'),
                    ('new', 'diff.extended'),
                    ('deleted', 'diff.extended'),
                    ('index', 'diff.extended'),
                    ('similarity', 'diff.extended'),
                    ('---', 'diff.file_a'),
                    ('+++', 'diff.file_b')]
    textprefixes = [('@', 'diff.hunk'),
                    # - and + are handled by diffsinglehunk
                   ]
    head = False

    # buffers a hunk, i.e. adjacent "-", "+" lines without other changes.
    hunkbuffer = []
    def consumehunkbuffer():
        if hunkbuffer:
            for token in dodiffhunk(hunkbuffer):
                yield token
            hunkbuffer[:] = []

    for chunk in func(*args, **kw):
        lines = chunk.split('\n')
        linecount = len(lines)
        for i, line in enumerate(lines):
            if head:
                if line.startswith('@'):
                    head = False
            else:
                if line and not line.startswith((' ', '+', '-', '@', '\\')):
                    head = True
            diffline = False
            if not head and line and line.startswith(('+', '-')):
                diffline = True

            prefixes = textprefixes
            if head:
                prefixes = headprefixes
            if diffline:
                # buffered
                bufferedline = line
                if i + 1 < linecount:
                    bufferedline += "\n"
                hunkbuffer.append(bufferedline)
            else:
                # unbuffered
                for token in consumehunkbuffer():
                    yield token
                stripline = line.rstrip()
                for prefix, label in prefixes:
                    if stripline.startswith(prefix):
                        yield (stripline, label)
                        if line != stripline:
                            yield (line[len(stripline):],
                                   'diff.trailingwhitespace')
                        break
                else:
                    yield (line, '')
                if i + 1 < linecount:
                    yield ('\n', '')
        for token in consumehunkbuffer():
            yield token

def diffui(*args, **kw):
    '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
    return difflabel(diff, *args, **kw)
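
# Illustrative sketch (hypothetical caller): diffui() pairs each output chunk
# with a label suitable for colorized output:
#
#   for output, label in diffui(repo, node1, node2, opts=diffallopts(ui)):
#       ui.write(output, label=label)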

def _filepairs(modified, added, removed, copy, opts):
    '''generates tuples (f1, f2, copyop), where f1 is the name of the file
    before and f2 is the name after. For added files, f1 will be None,
    and for removed files, f2 will be None. copyop may be set to None, 'copy'
    or 'rename' (the latter two only if opts.git is set).'''
    gone = set()
    copyto = dict([(v, k) for k, v in copy.items()])
    addedset, removedset = set(added), set(removed)
    for f in sorted(modified + added + removed):
        copyop = None
        f1, f2 = f, f
        if f in addedset:
            f1 = None
            if f in copy:
                if opts.git:
                    f1 = copy[f]
                    if f1 in removedset and f1 not in gone:
                        copyop = 'rename'
                        gone.add(f1)
                    else:
                        copyop = 'copy'
        elif f in removedset:
            f2 = None
            if opts.git:
                # have we already reported a copy above?
                if (f in copyto and copyto[f] in addedset
                    and copy[copyto[f]] == f):
                    continue
        yield f1, f2, copyop
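
# Illustrative sketch: renaming 'a' to 'b' (added=['b'], removed=['a'],
# copy={'b': 'a'}) makes _filepairs() yield ('a', 'b', 'rename') when opts.git
# is set, skipping the standalone removal of 'a'; without git diffs it yields
# ('a', None, None) and (None, 'b', None) instead.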

def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
            copy, getfilectx, opts, losedatafn, prefix, relroot):
    '''given input data, generate a diff and yield it in blocks

    If generating a diff would lose data like flags or binary data and
    losedatafn is not None, it will be called.

    relroot is removed and prefix is added to every path in the diff output.

    If relroot is not empty, this function expects every path in modified,
    added, removed and copy to start with it.'''

    def gitindex(text):
        if not text:
            text = ""
        l = len(text)
        s = hashlib.sha1('blob %d\0' % l)
        s.update(text)
        return hex(s.digest())

    if opts.noprefix:
        aprefix = bprefix = ''
    else:
        aprefix = 'a/'
        bprefix = 'b/'

    def diffline(f, revs):
        revinfo = ' '.join(["-r %s" % rev for rev in revs])
        return 'diff %s %s' % (revinfo, f)

    def isempty(fctx):
        return fctx is None or fctx.size() == 0

    date1 = dateutil.datestr(ctx1.date())
    date2 = dateutil.datestr(ctx2.date())

    gitmode = {'l': '120000', 'x': '100755', '': '100644'}

    if relroot != '' and (repo.ui.configbool('devel', 'all-warnings')
                          or repo.ui.configbool('devel', 'check-relroot')):
        for f in modified + added + removed + list(copy) + list(copy.values()):
            if f is not None and not f.startswith(relroot):
                raise AssertionError(
                    "file %s doesn't start with relroot %s" % (f, relroot))

    for f1, f2, copyop in _filepairs(modified, added, removed, copy, opts):
        content1 = None
        content2 = None
        fctx1 = None
        fctx2 = None
        flag1 = None
        flag2 = None
        if f1:
            fctx1 = getfilectx(f1, ctx1)
            if opts.git or losedatafn:
                flag1 = ctx1.flags(f1)
        if f2:
            fctx2 = getfilectx(f2, ctx2)
            if opts.git or losedatafn:
                flag2 = ctx2.flags(f2)
        # if binary is True, output "summary" or "base85", but not "text diff"
        if opts.text:
            binary = False
        else:
            binary = any(f.isbinary() for f in [fctx1, fctx2] if f is not None)

        if losedatafn and not opts.git:
            if (binary or
                # copy/rename
                f2 in copy or
                # empty file creation
                (not f1 and isempty(fctx2)) or
                # empty file deletion
                (isempty(fctx1) and not f2) or
                # create with flags
                (not f1 and flag2) or
                # change flags
                (f1 and f2 and flag1 != flag2)):
                losedatafn(f2 or f1)

        path1 = f1 or f2
        path2 = f2 or f1
        path1 = posixpath.join(prefix, path1[len(relroot):])
        path2 = posixpath.join(prefix, path2[len(relroot):])
        header = []
        if opts.git:
            header.append('diff --git %s%s %s%s' %
                          (aprefix, path1, bprefix, path2))
            if not f1: # added
                header.append('new file mode %s' % gitmode[flag2])
            elif not f2: # removed
                header.append('deleted file mode %s' % gitmode[flag1])
            else: # modified/copied/renamed
                mode1, mode2 = gitmode[flag1], gitmode[flag2]
                if mode1 != mode2:
                    header.append('old mode %s' % mode1)
                    header.append('new mode %s' % mode2)
                if copyop is not None:
                    if opts.showsimilarity:
                        sim = similar.score(ctx1[path1], ctx2[path2]) * 100
                        header.append('similarity index %d%%' % sim)
                    header.append('%s from %s' % (copyop, path1))
                    header.append('%s to %s' % (copyop, path2))
        elif revs and not repo.ui.quiet:
            header.append(diffline(path1, revs))

        #  fctx.is  | diffopts                | what to   | is fctx.data()
        #  binary() | text nobinary git index | output?   | outputted?
        # ------------------------------------|----------------------------
        #  yes      | no   no       no  *     | summary   | no
        #  yes      | no   no       yes *     | base85    | yes
        #  yes      | no   yes      no  *     | summary   | no
        #  yes      | no   yes      yes 0     | summary   | no
        #  yes      | no   yes      yes >0    | summary   | semi [1]
        #  yes      | yes  *        *   *     | text diff | yes
        #  no       | *    *        *   *     | text diff | yes
        # [1]: hash(fctx.data()) is outputted. so fctx.data() cannot be faked
        if binary and (not opts.git or (opts.git and opts.nobinary and not
                                        opts.index)):
            # fast path: no binary content will be displayed, content1 and
            # content2 are only used for equivalent test. cmp() could have a
            # fast path.
            if fctx1 is not None:
                content1 = b'\0'
            if fctx2 is not None:
                if fctx1 is not None and not fctx1.cmp(fctx2):
                    content2 = b'\0' # not different
                else:
                    content2 = b'\0\0'
        else:
            # normal path: load contents
            if fctx1 is not None:
                content1 = fctx1.data()
            if fctx2 is not None:
                content2 = fctx2.data()

        if binary and opts.git and not opts.nobinary:
            text = mdiff.b85diff(content1, content2)
            if text:
                header.append('index %s..%s' %
                              (gitindex(content1), gitindex(content2)))
            hunks = (None, [text]),
        else:
            if opts.git and opts.index > 0:
                flag = flag1
                if flag is None:
                    flag = flag2
                header.append('index %s..%s %s' %
                              (gitindex(content1)[0:opts.index],
                               gitindex(content2)[0:opts.index],
                               gitmode[flag]))

            uheaders, hunks = mdiff.unidiff(content1, date1,
                                            content2, date2,
                                            path1, path2,
                                            binary=binary, opts=opts)
            header.extend(uheaders)
        yield fctx1, fctx2, header, hunks
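
# Illustrative sketch (hypothetical file name): for a plain modification with
# git diffs enabled, trydiff() yields (fctx1, fctx2, header, hunks) where the
# header starts with 'diff --git a/foo.py b/foo.py' and ends with the
# '---'/'+++' lines returned by mdiff.unidiff(); binary files instead carry a
# base85 payload and an 'index <sha1>..<sha1>' line.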

def diffstatsum(stats):
    maxfile, maxtotal, addtotal, removetotal, binary = 0, 0, 0, 0, False
    for f, a, r, b in stats:
        maxfile = max(maxfile, encoding.colwidth(f))
        maxtotal = max(maxtotal, a + r)
        addtotal += a
        removetotal += r
        binary = binary or b

    return maxfile, maxtotal, addtotal, removetotal, binary
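
# Illustrative sketch: diffstatsum() folds per-file (name, adds, removes,
# isbinary) tuples into the widths and totals used for layout, e.g.
#
#   diffstatsum([('a', 2, 1, False), ('bb', 0, 0, True)])
#   # -> (2, 3, 2, 1, True)  # maxfile, maxtotal, addtotal, removetotal, binary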

def diffstatdata(lines):
    diffre = re.compile('^diff .*-r [a-z0-9]+\s(.*)$')

    results = []
    filename, adds, removes, isbinary = None, 0, 0, False

    def addresult():
        if filename:
            results.append((filename, adds, removes, isbinary))

    # inheader is used to track if a line is in the
    # header portion of the diff. This helps properly account
    # for lines that start with '--' or '++'
    inheader = False

    for line in lines:
        if line.startswith('diff'):
            addresult()
            # starting a new file diff
            # set numbers to 0 and reset inheader
            inheader = True
            adds, removes, isbinary = 0, 0, False
            if line.startswith('diff --git a/'):
                filename = gitre.search(line).group(2)
            elif line.startswith('diff -r'):
                # format: "diff -r ... -r ... filename"
                filename = diffre.search(line).group(1)
        elif line.startswith('@@'):
            inheader = False
        elif line.startswith('+') and not inheader:
            adds += 1
        elif line.startswith('-') and not inheader:
            removes += 1
        elif (line.startswith('GIT binary patch') or
              line.startswith('Binary file')):
            isbinary = True
    addresult()
    return results
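
# Illustrative sketch (hypothetical file names): fed the lines of a git-style
# patch, diffstatdata() returns one tuple per file, e.g.
#
#   [('foo.py', 3, 1, False), ('image.png', 0, 0, True)]
#
# meaning three additions and one removal in foo.py, and a binary change to
# image.png.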

def diffstat(lines, width=80):
    output = []
    stats = diffstatdata(lines)
    maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats)

    countwidth = len(str(maxtotal))
    if hasbinary and countwidth < 3:
        countwidth = 3
    graphwidth = width - countwidth - maxname - 6
    if graphwidth < 10:
        graphwidth = 10

    def scale(i):
        if maxtotal <= graphwidth:
            return i
        # If diffstat runs out of room it doesn't print anything,
        # which isn't very useful, so always print at least one + or -
        # if there were at least some changes.
        return max(i * graphwidth // maxtotal, int(bool(i)))

    for filename, adds, removes, isbinary in stats:
        if isbinary:
            count = 'Bin'
        else:
            count = '%d' % (adds + removes)
        pluses = '+' * scale(adds)
        minuses = '-' * scale(removes)
        output.append(' %s%s |  %*s %s%s\n' %
                      (filename, ' ' * (maxname - encoding.colwidth(filename)),
                       countwidth, count, pluses, minuses))

    if stats:
        output.append(_(' %d files changed, %d insertions(+), '
                        '%d deletions(-)\n')
                      % (len(stats), totaladds, totalremoves))

    return ''.join(output)
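
# Illustrative sketch (made-up numbers): for a single file with three additions
# and one removal, diffstat() renders
#
#    foo.py |  4 +++-
#    1 files changed, 3 insertions(+), 1 deletions(-)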

def diffstatui(*args, **kw):
    '''like diffstat(), but yields 2-tuples of (output, label) for
    ui.write()
    '''

    for line in diffstat(*args, **kw).splitlines():
        if line and line[-1] in '+-':
            name, graph = line.rsplit(' ', 1)
            yield (name + ' ', '')
            m = re.search(br'\++', graph)
            if m:
                yield (m.group(0), 'diffstat.inserted')
            m = re.search(br'-+', graph)
            if m:
                yield (m.group(0), 'diffstat.deleted')
        else:
            yield (line, '')
        yield ('\n', '')