py3: use '%d' for integers instead of '%s'...
Pulkit Goyal
r34507:1d804c22 default
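For context, the hunk below changes a ui.debug() format specifier from '%s' to '%d' for the integer return code r. Mercurial formats byte strings internally, and under Python 3 (PEP 461) bytes %-formatting only accepts bytes-like operands for %s, whereas %d works for integers on both Python 2 and Python 3.5+. A minimal illustration in plain Python (not part of the patch):

    b'merge tool returned: %d\n' % 0   # -> b'merge tool returned: 0\n' on Python 2 and 3.5+
    b'merge tool returned: %s\n' % 0   # -> TypeError on Python 3: %s needs a bytes-like operand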
@@ -1,779 +1,779 b''
1 # filemerge.py - file-level merge handling for Mercurial
1 # filemerge.py - file-level merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import filecmp
10 import filecmp
11 import os
11 import os
12 import re
12 import re
13 import tempfile
13 import tempfile
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import nullid, short
16 from .node import nullid, short
17
17
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 formatter,
21 formatter,
22 match,
22 match,
23 pycompat,
23 pycompat,
24 registrar,
24 registrar,
25 scmutil,
25 scmutil,
26 simplemerge,
26 simplemerge,
27 tagmerge,
27 tagmerge,
28 templatekw,
28 templatekw,
29 templater,
29 templater,
30 util,
30 util,
31 )
31 )
32
32
33 def _toolstr(ui, tool, part, default=""):
33 def _toolstr(ui, tool, part, default=""):
34 return ui.config("merge-tools", tool + "." + part, default)
34 return ui.config("merge-tools", tool + "." + part, default)
35
35
36 def _toolbool(ui, tool, part, default=False):
36 def _toolbool(ui, tool, part, default=False):
37 return ui.configbool("merge-tools", tool + "." + part, default)
37 return ui.configbool("merge-tools", tool + "." + part, default)
38
38
39 def _toollist(ui, tool, part, default=None):
39 def _toollist(ui, tool, part, default=None):
40 if default is None:
40 if default is None:
41 default = []
41 default = []
42 return ui.configlist("merge-tools", tool + "." + part, default)
42 return ui.configlist("merge-tools", tool + "." + part, default)
43
43
44 internals = {}
44 internals = {}
45 # Merge tools to document.
45 # Merge tools to document.
46 internalsdoc = {}
46 internalsdoc = {}
47
47
48 internaltool = registrar.internalmerge()
48 internaltool = registrar.internalmerge()
49
49
50 # internal tool merge types
50 # internal tool merge types
51 nomerge = internaltool.nomerge
51 nomerge = internaltool.nomerge
52 mergeonly = internaltool.mergeonly # just the full merge, no premerge
52 mergeonly = internaltool.mergeonly # just the full merge, no premerge
53 fullmerge = internaltool.fullmerge # both premerge and merge
53 fullmerge = internaltool.fullmerge # both premerge and merge
54
54
55 _localchangedotherdeletedmsg = _(
55 _localchangedotherdeletedmsg = _(
56 "local%(l)s changed %(fd)s which other%(o)s deleted\n"
56 "local%(l)s changed %(fd)s which other%(o)s deleted\n"
57 "use (c)hanged version, (d)elete, or leave (u)nresolved?"
57 "use (c)hanged version, (d)elete, or leave (u)nresolved?"
58 "$$ &Changed $$ &Delete $$ &Unresolved")
58 "$$ &Changed $$ &Delete $$ &Unresolved")
59
59
60 _otherchangedlocaldeletedmsg = _(
60 _otherchangedlocaldeletedmsg = _(
61 "other%(o)s changed %(fd)s which local%(l)s deleted\n"
61 "other%(o)s changed %(fd)s which local%(l)s deleted\n"
62 "use (c)hanged version, leave (d)eleted, or "
62 "use (c)hanged version, leave (d)eleted, or "
63 "leave (u)nresolved?"
63 "leave (u)nresolved?"
64 "$$ &Changed $$ &Deleted $$ &Unresolved")
64 "$$ &Changed $$ &Deleted $$ &Unresolved")
65
65
66 class absentfilectx(object):
66 class absentfilectx(object):
67 """Represents a file that's ostensibly in a context but is actually not
67 """Represents a file that's ostensibly in a context but is actually not
68 present in it.
68 present in it.
69
69
70 This is here because it's very specific to the filemerge code for now --
70 This is here because it's very specific to the filemerge code for now --
71 other code is likely going to break with the values this returns."""
71 other code is likely going to break with the values this returns."""
72 def __init__(self, ctx, f):
72 def __init__(self, ctx, f):
73 self._ctx = ctx
73 self._ctx = ctx
74 self._f = f
74 self._f = f
75
75
76 def path(self):
76 def path(self):
77 return self._f
77 return self._f
78
78
79 def size(self):
79 def size(self):
80 return None
80 return None
81
81
82 def data(self):
82 def data(self):
83 return None
83 return None
84
84
85 def filenode(self):
85 def filenode(self):
86 return nullid
86 return nullid
87
87
88 _customcmp = True
88 _customcmp = True
89 def cmp(self, fctx):
89 def cmp(self, fctx):
90 """compare with other file context
90 """compare with other file context
91
91
92 returns True if different from fctx.
92 returns True if different from fctx.
93 """
93 """
94 return not (fctx.isabsent() and
94 return not (fctx.isabsent() and
95 fctx.ctx() == self.ctx() and
95 fctx.ctx() == self.ctx() and
96 fctx.path() == self.path())
96 fctx.path() == self.path())
97
97
98 def flags(self):
98 def flags(self):
99 return ''
99 return ''
100
100
101 def changectx(self):
101 def changectx(self):
102 return self._ctx
102 return self._ctx
103
103
104 def isbinary(self):
104 def isbinary(self):
105 return False
105 return False
106
106
107 def isabsent(self):
107 def isabsent(self):
108 return True
108 return True
109
109
110 def _findtool(ui, tool):
110 def _findtool(ui, tool):
111 if tool in internals:
111 if tool in internals:
112 return tool
112 return tool
113 return findexternaltool(ui, tool)
113 return findexternaltool(ui, tool)
114
114
115 def findexternaltool(ui, tool):
115 def findexternaltool(ui, tool):
116 for kn in ("regkey", "regkeyalt"):
116 for kn in ("regkey", "regkeyalt"):
117 k = _toolstr(ui, tool, kn)
117 k = _toolstr(ui, tool, kn)
118 if not k:
118 if not k:
119 continue
119 continue
120 p = util.lookupreg(k, _toolstr(ui, tool, "regname"))
120 p = util.lookupreg(k, _toolstr(ui, tool, "regname"))
121 if p:
121 if p:
122 p = util.findexe(p + _toolstr(ui, tool, "regappend"))
122 p = util.findexe(p + _toolstr(ui, tool, "regappend"))
123 if p:
123 if p:
124 return p
124 return p
125 exe = _toolstr(ui, tool, "executable", tool)
125 exe = _toolstr(ui, tool, "executable", tool)
126 return util.findexe(util.expandpath(exe))
126 return util.findexe(util.expandpath(exe))
127
127
128 def _picktool(repo, ui, path, binary, symlink, changedelete):
128 def _picktool(repo, ui, path, binary, symlink, changedelete):
129 def supportscd(tool):
129 def supportscd(tool):
130 return tool in internals and internals[tool].mergetype == nomerge
130 return tool in internals and internals[tool].mergetype == nomerge
131
131
132 def check(tool, pat, symlink, binary, changedelete):
132 def check(tool, pat, symlink, binary, changedelete):
133 tmsg = tool
133 tmsg = tool
134 if pat:
134 if pat:
135 tmsg = _("%s (for pattern %s)") % (tool, pat)
135 tmsg = _("%s (for pattern %s)") % (tool, pat)
136 if not _findtool(ui, tool):
136 if not _findtool(ui, tool):
137 if pat: # explicitly requested tool deserves a warning
137 if pat: # explicitly requested tool deserves a warning
138 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
138 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
139 else: # configured but non-existing tools are more silent
139 else: # configured but non-existing tools are more silent
140 ui.note(_("couldn't find merge tool %s\n") % tmsg)
140 ui.note(_("couldn't find merge tool %s\n") % tmsg)
141 elif symlink and not _toolbool(ui, tool, "symlink"):
141 elif symlink and not _toolbool(ui, tool, "symlink"):
142 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
142 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
143 elif binary and not _toolbool(ui, tool, "binary"):
143 elif binary and not _toolbool(ui, tool, "binary"):
144 ui.warn(_("tool %s can't handle binary\n") % tmsg)
144 ui.warn(_("tool %s can't handle binary\n") % tmsg)
145 elif changedelete and not supportscd(tool):
145 elif changedelete and not supportscd(tool):
146 # the nomerge tools are the only tools that support change/delete
146 # the nomerge tools are the only tools that support change/delete
147 # conflicts
147 # conflicts
148 pass
148 pass
149 elif not util.gui() and _toolbool(ui, tool, "gui"):
149 elif not util.gui() and _toolbool(ui, tool, "gui"):
150 ui.warn(_("tool %s requires a GUI\n") % tmsg)
150 ui.warn(_("tool %s requires a GUI\n") % tmsg)
151 else:
151 else:
152 return True
152 return True
153 return False
153 return False
154
154
155 # internal config: ui.forcemerge
155 # internal config: ui.forcemerge
156 # forcemerge comes from command line arguments, highest priority
156 # forcemerge comes from command line arguments, highest priority
157 force = ui.config('ui', 'forcemerge')
157 force = ui.config('ui', 'forcemerge')
158 if force:
158 if force:
159 toolpath = _findtool(ui, force)
159 toolpath = _findtool(ui, force)
160 if changedelete and not supportscd(toolpath):
160 if changedelete and not supportscd(toolpath):
161 return ":prompt", None
161 return ":prompt", None
162 else:
162 else:
163 if toolpath:
163 if toolpath:
164 return (force, util.shellquote(toolpath))
164 return (force, util.shellquote(toolpath))
165 else:
165 else:
166 # mimic HGMERGE if given tool not found
166 # mimic HGMERGE if given tool not found
167 return (force, force)
167 return (force, force)
168
168
169 # HGMERGE takes next precedence
169 # HGMERGE takes next precedence
170 hgmerge = encoding.environ.get("HGMERGE")
170 hgmerge = encoding.environ.get("HGMERGE")
171 if hgmerge:
171 if hgmerge:
172 if changedelete and not supportscd(hgmerge):
172 if changedelete and not supportscd(hgmerge):
173 return ":prompt", None
173 return ":prompt", None
174 else:
174 else:
175 return (hgmerge, hgmerge)
175 return (hgmerge, hgmerge)
176
176
177 # then patterns
177 # then patterns
178 for pat, tool in ui.configitems("merge-patterns"):
178 for pat, tool in ui.configitems("merge-patterns"):
179 mf = match.match(repo.root, '', [pat])
179 mf = match.match(repo.root, '', [pat])
180 if mf(path) and check(tool, pat, symlink, False, changedelete):
180 if mf(path) and check(tool, pat, symlink, False, changedelete):
181 toolpath = _findtool(ui, tool)
181 toolpath = _findtool(ui, tool)
182 return (tool, util.shellquote(toolpath))
182 return (tool, util.shellquote(toolpath))
183
183
184 # then merge tools
184 # then merge tools
185 tools = {}
185 tools = {}
186 disabled = set()
186 disabled = set()
187 for k, v in ui.configitems("merge-tools"):
187 for k, v in ui.configitems("merge-tools"):
188 t = k.split('.')[0]
188 t = k.split('.')[0]
189 if t not in tools:
189 if t not in tools:
190 tools[t] = int(_toolstr(ui, t, "priority", "0"))
190 tools[t] = int(_toolstr(ui, t, "priority", "0"))
191 if _toolbool(ui, t, "disabled", False):
191 if _toolbool(ui, t, "disabled", False):
192 disabled.add(t)
192 disabled.add(t)
193 names = tools.keys()
193 names = tools.keys()
194 tools = sorted([(-p, tool) for tool, p in tools.items()
194 tools = sorted([(-p, tool) for tool, p in tools.items()
195 if tool not in disabled])
195 if tool not in disabled])
196 uimerge = ui.config("ui", "merge")
196 uimerge = ui.config("ui", "merge")
197 if uimerge:
197 if uimerge:
198 # external tools defined in uimerge won't be able to handle
198 # external tools defined in uimerge won't be able to handle
199 # change/delete conflicts
199 # change/delete conflicts
200 if uimerge not in names and not changedelete:
200 if uimerge not in names and not changedelete:
201 return (uimerge, uimerge)
201 return (uimerge, uimerge)
202 tools.insert(0, (None, uimerge)) # highest priority
202 tools.insert(0, (None, uimerge)) # highest priority
203 tools.append((None, "hgmerge")) # the old default, if found
203 tools.append((None, "hgmerge")) # the old default, if found
204 for p, t in tools:
204 for p, t in tools:
205 if check(t, None, symlink, binary, changedelete):
205 if check(t, None, symlink, binary, changedelete):
206 toolpath = _findtool(ui, t)
206 toolpath = _findtool(ui, t)
207 return (t, util.shellquote(toolpath))
207 return (t, util.shellquote(toolpath))
208
208
209 # internal merge or prompt as last resort
209 # internal merge or prompt as last resort
210 if symlink or binary or changedelete:
210 if symlink or binary or changedelete:
211 if not changedelete and len(tools):
211 if not changedelete and len(tools):
212 # any tool is rejected by capability for symlink or binary
212 # any tool is rejected by capability for symlink or binary
213 ui.warn(_("no tool found to merge %s\n") % path)
213 ui.warn(_("no tool found to merge %s\n") % path)
214 return ":prompt", None
214 return ":prompt", None
215 return ":merge", None
215 return ":merge", None
216
216
217 def _eoltype(data):
217 def _eoltype(data):
218 "Guess the EOL type of a file"
218 "Guess the EOL type of a file"
219 if '\0' in data: # binary
219 if '\0' in data: # binary
220 return None
220 return None
221 if '\r\n' in data: # Windows
221 if '\r\n' in data: # Windows
222 return '\r\n'
222 return '\r\n'
223 if '\r' in data: # Old Mac
223 if '\r' in data: # Old Mac
224 return '\r'
224 return '\r'
225 if '\n' in data: # UNIX
225 if '\n' in data: # UNIX
226 return '\n'
226 return '\n'
227 return None # unknown
227 return None # unknown
228
228
229 def _matcheol(file, origfile):
229 def _matcheol(file, origfile):
230 "Convert EOL markers in a file to match origfile"
230 "Convert EOL markers in a file to match origfile"
231 tostyle = _eoltype(util.readfile(origfile))
231 tostyle = _eoltype(util.readfile(origfile))
232 if tostyle:
232 if tostyle:
233 data = util.readfile(file)
233 data = util.readfile(file)
234 style = _eoltype(data)
234 style = _eoltype(data)
235 if style:
235 if style:
236 newdata = data.replace(style, tostyle)
236 newdata = data.replace(style, tostyle)
237 if newdata != data:
237 if newdata != data:
238 util.writefile(file, newdata)
238 util.writefile(file, newdata)
239
239
240 @internaltool('prompt', nomerge)
240 @internaltool('prompt', nomerge)
241 def _iprompt(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
241 def _iprompt(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
242 """Asks the user which of the local `p1()` or the other `p2()` version to
242 """Asks the user which of the local `p1()` or the other `p2()` version to
243 keep as the merged version."""
243 keep as the merged version."""
244 ui = repo.ui
244 ui = repo.ui
245 fd = fcd.path()
245 fd = fcd.path()
246
246
247 prompts = partextras(labels)
247 prompts = partextras(labels)
248 prompts['fd'] = fd
248 prompts['fd'] = fd
249 try:
249 try:
250 if fco.isabsent():
250 if fco.isabsent():
251 index = ui.promptchoice(
251 index = ui.promptchoice(
252 _localchangedotherdeletedmsg % prompts, 2)
252 _localchangedotherdeletedmsg % prompts, 2)
253 choice = ['local', 'other', 'unresolved'][index]
253 choice = ['local', 'other', 'unresolved'][index]
254 elif fcd.isabsent():
254 elif fcd.isabsent():
255 index = ui.promptchoice(
255 index = ui.promptchoice(
256 _otherchangedlocaldeletedmsg % prompts, 2)
256 _otherchangedlocaldeletedmsg % prompts, 2)
257 choice = ['other', 'local', 'unresolved'][index]
257 choice = ['other', 'local', 'unresolved'][index]
258 else:
258 else:
259 index = ui.promptchoice(
259 index = ui.promptchoice(
260 _("keep (l)ocal%(l)s, take (o)ther%(o)s, or leave (u)nresolved"
260 _("keep (l)ocal%(l)s, take (o)ther%(o)s, or leave (u)nresolved"
261 " for %(fd)s?"
261 " for %(fd)s?"
262 "$$ &Local $$ &Other $$ &Unresolved") % prompts, 2)
262 "$$ &Local $$ &Other $$ &Unresolved") % prompts, 2)
263 choice = ['local', 'other', 'unresolved'][index]
263 choice = ['local', 'other', 'unresolved'][index]
264
264
265 if choice == 'other':
265 if choice == 'other':
266 return _iother(repo, mynode, orig, fcd, fco, fca, toolconf,
266 return _iother(repo, mynode, orig, fcd, fco, fca, toolconf,
267 labels)
267 labels)
268 elif choice == 'local':
268 elif choice == 'local':
269 return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf,
269 return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf,
270 labels)
270 labels)
271 elif choice == 'unresolved':
271 elif choice == 'unresolved':
272 return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf,
272 return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf,
273 labels)
273 labels)
274 except error.ResponseExpected:
274 except error.ResponseExpected:
275 ui.write("\n")
275 ui.write("\n")
276 return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf,
276 return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf,
277 labels)
277 labels)
278
278
279 @internaltool('local', nomerge)
279 @internaltool('local', nomerge)
280 def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
280 def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
281 """Uses the local `p1()` version of files as the merged version."""
281 """Uses the local `p1()` version of files as the merged version."""
282 return 0, fcd.isabsent()
282 return 0, fcd.isabsent()
283
283
284 @internaltool('other', nomerge)
284 @internaltool('other', nomerge)
285 def _iother(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
285 def _iother(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
286 """Uses the other `p2()` version of files as the merged version."""
286 """Uses the other `p2()` version of files as the merged version."""
287 if fco.isabsent():
287 if fco.isabsent():
288 # local changed, remote deleted -- 'deleted' picked
288 # local changed, remote deleted -- 'deleted' picked
289 _underlyingfctxifabsent(fcd).remove()
289 _underlyingfctxifabsent(fcd).remove()
290 deleted = True
290 deleted = True
291 else:
291 else:
292 _underlyingfctxifabsent(fcd).write(fco.data(), fco.flags())
292 _underlyingfctxifabsent(fcd).write(fco.data(), fco.flags())
293 deleted = False
293 deleted = False
294 return 0, deleted
294 return 0, deleted
295
295
296 @internaltool('fail', nomerge)
296 @internaltool('fail', nomerge)
297 def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
297 def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
298 """
298 """
299 Rather than attempting to merge files that were modified on both
299 Rather than attempting to merge files that were modified on both
300 branches, it marks them as unresolved. The resolve command must be
300 branches, it marks them as unresolved. The resolve command must be
301 used to resolve these conflicts."""
301 used to resolve these conflicts."""
302 # for change/delete conflicts write out the changed version, then fail
302 # for change/delete conflicts write out the changed version, then fail
303 if fcd.isabsent():
303 if fcd.isabsent():
304 _underlyingfctxifabsent(fcd).write(fco.data(), fco.flags())
304 _underlyingfctxifabsent(fcd).write(fco.data(), fco.flags())
305 return 1, False
305 return 1, False
306
306
307 def _underlyingfctxifabsent(filectx):
307 def _underlyingfctxifabsent(filectx):
308 """Sometimes when resolving, our fcd is actually an absentfilectx, but
308 """Sometimes when resolving, our fcd is actually an absentfilectx, but
309 we want to write to it (to do the resolve). This helper returns the
309 we want to write to it (to do the resolve). This helper returns the
310 underlying workingfilectx in that case.
310 underlying workingfilectx in that case.
311 """
311 """
312 if filectx.isabsent():
312 if filectx.isabsent():
313 return filectx.changectx()[filectx.path()]
313 return filectx.changectx()[filectx.path()]
314 else:
314 else:
315 return filectx
315 return filectx
316
316
317 def _premerge(repo, fcd, fco, fca, toolconf, files, labels=None):
317 def _premerge(repo, fcd, fco, fca, toolconf, files, labels=None):
318 tool, toolpath, binary, symlink = toolconf
318 tool, toolpath, binary, symlink = toolconf
319 if symlink or fcd.isabsent() or fco.isabsent():
319 if symlink or fcd.isabsent() or fco.isabsent():
320 return 1
320 return 1
321 unused, unused, unused, back = files
321 unused, unused, unused, back = files
322
322
323 ui = repo.ui
323 ui = repo.ui
324
324
325 validkeep = ['keep', 'keep-merge3']
325 validkeep = ['keep', 'keep-merge3']
326
326
327 # do we attempt to simplemerge first?
327 # do we attempt to simplemerge first?
328 try:
328 try:
329 premerge = _toolbool(ui, tool, "premerge", not binary)
329 premerge = _toolbool(ui, tool, "premerge", not binary)
330 except error.ConfigError:
330 except error.ConfigError:
331 premerge = _toolstr(ui, tool, "premerge").lower()
331 premerge = _toolstr(ui, tool, "premerge").lower()
332 if premerge not in validkeep:
332 if premerge not in validkeep:
333 _valid = ', '.join(["'" + v + "'" for v in validkeep])
333 _valid = ', '.join(["'" + v + "'" for v in validkeep])
334 raise error.ConfigError(_("%s.premerge not valid "
334 raise error.ConfigError(_("%s.premerge not valid "
335 "('%s' is neither boolean nor %s)") %
335 "('%s' is neither boolean nor %s)") %
336 (tool, premerge, _valid))
336 (tool, premerge, _valid))
337
337
338 if premerge:
338 if premerge:
339 if premerge == 'keep-merge3':
339 if premerge == 'keep-merge3':
340 if not labels:
340 if not labels:
341 labels = _defaultconflictlabels
341 labels = _defaultconflictlabels
342 if len(labels) < 3:
342 if len(labels) < 3:
343 labels.append('base')
343 labels.append('base')
344 r = simplemerge.simplemerge(ui, fcd, fca, fco, quiet=True, label=labels)
344 r = simplemerge.simplemerge(ui, fcd, fca, fco, quiet=True, label=labels)
345 if not r:
345 if not r:
346 ui.debug(" premerge successful\n")
346 ui.debug(" premerge successful\n")
347 return 0
347 return 0
348 if premerge not in validkeep:
348 if premerge not in validkeep:
349 # restore from backup and try again
349 # restore from backup and try again
350 _restorebackup(fcd, back)
350 _restorebackup(fcd, back)
351 return 1 # continue merging
351 return 1 # continue merging
352
352
353 def _mergecheck(repo, mynode, orig, fcd, fco, fca, toolconf):
353 def _mergecheck(repo, mynode, orig, fcd, fco, fca, toolconf):
354 tool, toolpath, binary, symlink = toolconf
354 tool, toolpath, binary, symlink = toolconf
355 if symlink:
355 if symlink:
356 repo.ui.warn(_('warning: internal %s cannot merge symlinks '
356 repo.ui.warn(_('warning: internal %s cannot merge symlinks '
357 'for %s\n') % (tool, fcd.path()))
357 'for %s\n') % (tool, fcd.path()))
358 return False
358 return False
359 if fcd.isabsent() or fco.isabsent():
359 if fcd.isabsent() or fco.isabsent():
360 repo.ui.warn(_('warning: internal %s cannot merge change/delete '
360 repo.ui.warn(_('warning: internal %s cannot merge change/delete '
361 'conflict for %s\n') % (tool, fcd.path()))
361 'conflict for %s\n') % (tool, fcd.path()))
362 return False
362 return False
363 return True
363 return True
364
364
365 def _merge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, mode):
365 def _merge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, mode):
366 """
366 """
367 Uses the internal non-interactive simple merge algorithm for merging
367 Uses the internal non-interactive simple merge algorithm for merging
368 files. It will fail if there are any conflicts and leave markers in
368 files. It will fail if there are any conflicts and leave markers in
369 the partially merged file. Markers will have two sections, one for each side
369 the partially merged file. Markers will have two sections, one for each side
370 of the merge, unless mode equals 'union', which suppresses the markers.
370 of the merge, unless mode equals 'union', which suppresses the markers.
371 ui = repo.ui
371 ui = repo.ui
372
372
373 r = simplemerge.simplemerge(ui, fcd, fca, fco, label=labels, mode=mode)
373 r = simplemerge.simplemerge(ui, fcd, fca, fco, label=labels, mode=mode)
374 return True, r, False
374 return True, r, False
375
375
376 @internaltool('union', fullmerge,
376 @internaltool('union', fullmerge,
377 _("warning: conflicts while merging %s! "
377 _("warning: conflicts while merging %s! "
378 "(edit, then use 'hg resolve --mark')\n"),
378 "(edit, then use 'hg resolve --mark')\n"),
379 precheck=_mergecheck)
379 precheck=_mergecheck)
380 def _iunion(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
380 def _iunion(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
381 """
381 """
382 Uses the internal non-interactive simple merge algorithm for merging
382 Uses the internal non-interactive simple merge algorithm for merging
383 files. It will use both left and right sides for conflict regions.
383 files. It will use both left and right sides for conflict regions.
384 No markers are inserted."""
384 No markers are inserted."""
385 return _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
385 return _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
386 files, labels, 'union')
386 files, labels, 'union')
387
387
388 @internaltool('merge', fullmerge,
388 @internaltool('merge', fullmerge,
389 _("warning: conflicts while merging %s! "
389 _("warning: conflicts while merging %s! "
390 "(edit, then use 'hg resolve --mark')\n"),
390 "(edit, then use 'hg resolve --mark')\n"),
391 precheck=_mergecheck)
391 precheck=_mergecheck)
392 def _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
392 def _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
393 """
393 """
394 Uses the internal non-interactive simple merge algorithm for merging
394 Uses the internal non-interactive simple merge algorithm for merging
395 files. It will fail if there are any conflicts and leave markers in
395 files. It will fail if there are any conflicts and leave markers in
396 the partially merged file. Markers will have two sections, one for each side
396 the partially merged file. Markers will have two sections, one for each side
397 of the merge."""
397 of the merge."""
398 return _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
398 return _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
399 files, labels, 'merge')
399 files, labels, 'merge')
400
400
401 @internaltool('merge3', fullmerge,
401 @internaltool('merge3', fullmerge,
402 _("warning: conflicts while merging %s! "
402 _("warning: conflicts while merging %s! "
403 "(edit, then use 'hg resolve --mark')\n"),
403 "(edit, then use 'hg resolve --mark')\n"),
404 precheck=_mergecheck)
404 precheck=_mergecheck)
405 def _imerge3(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
405 def _imerge3(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
406 """
406 """
407 Uses the internal non-interactive simple merge algorithm for merging
407 Uses the internal non-interactive simple merge algorithm for merging
408 files. It will fail if there are any conflicts and leave markers in
408 files. It will fail if there are any conflicts and leave markers in
409 the partially merged file. Markers will have three sections, one from each
409 the partially merged file. Markers will have three sections, one from each
410 side of the merge and one for the base content."""
410 side of the merge and one for the base content."""
411 if not labels:
411 if not labels:
412 labels = _defaultconflictlabels
412 labels = _defaultconflictlabels
413 if len(labels) < 3:
413 if len(labels) < 3:
414 labels.append('base')
414 labels.append('base')
415 return _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels)
415 return _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels)
416
416
417 def _imergeauto(repo, mynode, orig, fcd, fco, fca, toolconf, files,
417 def _imergeauto(repo, mynode, orig, fcd, fco, fca, toolconf, files,
418 labels=None, localorother=None):
418 labels=None, localorother=None):
419 """
419 """
420 Generic driver for _imergelocal and _imergeother
420 Generic driver for _imergelocal and _imergeother
421 """
421 """
422 assert localorother is not None
422 assert localorother is not None
423 tool, toolpath, binary, symlink = toolconf
423 tool, toolpath, binary, symlink = toolconf
424 r = simplemerge.simplemerge(repo.ui, fcd, fca, fco, label=labels,
424 r = simplemerge.simplemerge(repo.ui, fcd, fca, fco, label=labels,
425 localorother=localorother)
425 localorother=localorother)
426 return True, r
426 return True, r
427
427
428 @internaltool('merge-local', mergeonly, precheck=_mergecheck)
428 @internaltool('merge-local', mergeonly, precheck=_mergecheck)
429 def _imergelocal(*args, **kwargs):
429 def _imergelocal(*args, **kwargs):
430 """
430 """
431 Like :merge, but resolve all conflicts non-interactively in favor
431 Like :merge, but resolve all conflicts non-interactively in favor
432 of the local `p1()` changes."""
432 of the local `p1()` changes."""
433 success, status = _imergeauto(localorother='local', *args, **kwargs)
433 success, status = _imergeauto(localorother='local', *args, **kwargs)
434 return success, status, False
434 return success, status, False
435
435
436 @internaltool('merge-other', mergeonly, precheck=_mergecheck)
436 @internaltool('merge-other', mergeonly, precheck=_mergecheck)
437 def _imergeother(*args, **kwargs):
437 def _imergeother(*args, **kwargs):
438 """
438 """
439 Like :merge, but resolve all conflicts non-interactively in favor
439 Like :merge, but resolve all conflicts non-interactively in favor
440 of the other `p2()` changes."""
440 of the other `p2()` changes."""
441 success, status = _imergeauto(localorother='other', *args, **kwargs)
441 success, status = _imergeauto(localorother='other', *args, **kwargs)
442 return success, status, False
442 return success, status, False
443
443
444 @internaltool('tagmerge', mergeonly,
444 @internaltool('tagmerge', mergeonly,
445 _("automatic tag merging of %s failed! "
445 _("automatic tag merging of %s failed! "
446 "(use 'hg resolve --tool :merge' or another merge "
446 "(use 'hg resolve --tool :merge' or another merge "
447 "tool of your choice)\n"))
447 "tool of your choice)\n"))
448 def _itagmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
448 def _itagmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
449 """
449 """
450 Uses the internal tag merge algorithm (experimental).
450 Uses the internal tag merge algorithm (experimental).
451 """
451 """
452 success, status = tagmerge.merge(repo, fcd, fco, fca)
452 success, status = tagmerge.merge(repo, fcd, fco, fca)
453 return success, status, False
453 return success, status, False
454
454
455 @internaltool('dump', fullmerge)
455 @internaltool('dump', fullmerge)
456 def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
456 def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
457 """
457 """
458 Creates three versions of the files to merge, containing the
458 Creates three versions of the files to merge, containing the
459 contents of local, other and base. These files can then be used to
459 contents of local, other and base. These files can then be used to
460 perform a merge manually. If the file to be merged is named
460 perform a merge manually. If the file to be merged is named
461 ``a.txt``, these files will accordingly be named ``a.txt.local``,
461 ``a.txt``, these files will accordingly be named ``a.txt.local``,
462 ``a.txt.other`` and ``a.txt.base`` and they will be placed in the
462 ``a.txt.other`` and ``a.txt.base`` and they will be placed in the
463 same directory as ``a.txt``.
463 same directory as ``a.txt``.
464
464
465 This implies premerge. Therefore, files aren't dumped if premerge
465 This implies premerge. Therefore, files aren't dumped if premerge
466 runs successfully. Use :forcedump to forcibly write files out.
466 runs successfully. Use :forcedump to forcibly write files out.
467 """
467 """
468 a = _workingpath(repo, fcd)
468 a = _workingpath(repo, fcd)
469 fd = fcd.path()
469 fd = fcd.path()
470
470
471 util.writefile(a + ".local", fcd.decodeddata())
471 util.writefile(a + ".local", fcd.decodeddata())
472 repo.wwrite(fd + ".other", fco.data(), fco.flags())
472 repo.wwrite(fd + ".other", fco.data(), fco.flags())
473 repo.wwrite(fd + ".base", fca.data(), fca.flags())
473 repo.wwrite(fd + ".base", fca.data(), fca.flags())
474 return False, 1, False
474 return False, 1, False
475
475
476 @internaltool('forcedump', mergeonly)
476 @internaltool('forcedump', mergeonly)
477 def _forcedump(repo, mynode, orig, fcd, fco, fca, toolconf, files,
477 def _forcedump(repo, mynode, orig, fcd, fco, fca, toolconf, files,
478 labels=None):
478 labels=None):
479 """
479 """
480 Creates three versions of the files, the same as :dump, but omits premerge.
480 Creates three versions of the files, the same as :dump, but omits premerge.
481 """
481 """
482 return _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files,
482 return _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files,
483 labels=labels)
483 labels=labels)
484
484
485 def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
485 def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
486 tool, toolpath, binary, symlink = toolconf
486 tool, toolpath, binary, symlink = toolconf
487 if fcd.isabsent() or fco.isabsent():
487 if fcd.isabsent() or fco.isabsent():
488 repo.ui.warn(_('warning: %s cannot merge change/delete conflict '
488 repo.ui.warn(_('warning: %s cannot merge change/delete conflict '
489 'for %s\n') % (tool, fcd.path()))
489 'for %s\n') % (tool, fcd.path()))
490 return False, 1, None
490 return False, 1, None
491 unused, unused, unused, back = files
491 unused, unused, unused, back = files
492 a = _workingpath(repo, fcd)
492 a = _workingpath(repo, fcd)
493 b, c = _maketempfiles(repo, fco, fca)
493 b, c = _maketempfiles(repo, fco, fca)
494 try:
494 try:
495 out = ""
495 out = ""
496 env = {'HG_FILE': fcd.path(),
496 env = {'HG_FILE': fcd.path(),
497 'HG_MY_NODE': short(mynode),
497 'HG_MY_NODE': short(mynode),
498 'HG_OTHER_NODE': str(fco.changectx()),
498 'HG_OTHER_NODE': str(fco.changectx()),
499 'HG_BASE_NODE': str(fca.changectx()),
499 'HG_BASE_NODE': str(fca.changectx()),
500 'HG_MY_ISLINK': 'l' in fcd.flags(),
500 'HG_MY_ISLINK': 'l' in fcd.flags(),
501 'HG_OTHER_ISLINK': 'l' in fco.flags(),
501 'HG_OTHER_ISLINK': 'l' in fco.flags(),
502 'HG_BASE_ISLINK': 'l' in fca.flags(),
502 'HG_BASE_ISLINK': 'l' in fca.flags(),
503 }
503 }
504 ui = repo.ui
504 ui = repo.ui
505
505
506 args = _toolstr(ui, tool, "args", '$local $base $other')
506 args = _toolstr(ui, tool, "args", '$local $base $other')
507 if "$output" in args:
507 if "$output" in args:
508 out, a = a, back # read input from backup, write to original
508 out, a = a, back # read input from backup, write to original
509 replace = {'local': a, 'base': b, 'other': c, 'output': out}
509 replace = {'local': a, 'base': b, 'other': c, 'output': out}
510 args = util.interpolate(r'\$', replace, args,
510 args = util.interpolate(r'\$', replace, args,
511 lambda s: util.shellquote(util.localpath(s)))
511 lambda s: util.shellquote(util.localpath(s)))
512 cmd = toolpath + ' ' + args
512 cmd = toolpath + ' ' + args
513 if _toolbool(ui, tool, "gui"):
513 if _toolbool(ui, tool, "gui"):
514 repo.ui.status(_('running merge tool %s for file %s\n') %
514 repo.ui.status(_('running merge tool %s for file %s\n') %
515 (tool, fcd.path()))
515 (tool, fcd.path()))
516 repo.ui.debug('launching merge tool: %s\n' % cmd)
516 repo.ui.debug('launching merge tool: %s\n' % cmd)
517 r = ui.system(cmd, cwd=repo.root, environ=env, blockedtag='mergetool')
517 r = ui.system(cmd, cwd=repo.root, environ=env, blockedtag='mergetool')
518 repo.ui.debug('merge tool returned: %s\n' % r)
518 repo.ui.debug('merge tool returned: %d\n' % r)
519 return True, r, False
519 return True, r, False
520 finally:
520 finally:
521 util.unlink(b)
521 util.unlink(b)
522 util.unlink(c)
522 util.unlink(c)
523
523
524 def _formatconflictmarker(repo, ctx, template, label, pad):
524 def _formatconflictmarker(repo, ctx, template, label, pad):
525 """Applies the given template to the ctx, prefixed by the label.
525 """Applies the given template to the ctx, prefixed by the label.
526
526
527 Pad is the minimum width of the label prefix, so that multiple markers
527 Pad is the minimum width of the label prefix, so that multiple markers
528 can have aligned templated parts.
528 can have aligned templated parts.
529 """
529 """
530 if ctx.node() is None:
530 if ctx.node() is None:
531 ctx = ctx.p1()
531 ctx = ctx.p1()
532
532
533 props = templatekw.keywords.copy()
533 props = templatekw.keywords.copy()
534 props['templ'] = template
534 props['templ'] = template
535 props['ctx'] = ctx
535 props['ctx'] = ctx
536 props['repo'] = repo
536 props['repo'] = repo
537 templateresult = template.render(props)
537 templateresult = template.render(props)
538
538
539 label = ('%s:' % label).ljust(pad + 1)
539 label = ('%s:' % label).ljust(pad + 1)
540 mark = '%s %s' % (label, templateresult)
540 mark = '%s %s' % (label, templateresult)
541
541
542 if mark:
542 if mark:
543 mark = mark.splitlines()[0] # split for safety
543 mark = mark.splitlines()[0] # split for safety
544
544
545 # 8 for the prefix of conflict marker lines (e.g. '<<<<<<< ')
545 # 8 for the prefix of conflict marker lines (e.g. '<<<<<<< ')
546 return util.ellipsis(mark, 80 - 8)
546 return util.ellipsis(mark, 80 - 8)
547
547
548 _defaultconflictlabels = ['local', 'other']
548 _defaultconflictlabels = ['local', 'other']
549
549
550 def _formatlabels(repo, fcd, fco, fca, labels):
550 def _formatlabels(repo, fcd, fco, fca, labels):
551 """Formats the given labels using the conflict marker template.
551 """Formats the given labels using the conflict marker template.
552
552
553 Returns a list of formatted labels.
553 Returns a list of formatted labels.
554 """
554 """
555 cd = fcd.changectx()
555 cd = fcd.changectx()
556 co = fco.changectx()
556 co = fco.changectx()
557 ca = fca.changectx()
557 ca = fca.changectx()
558
558
559 ui = repo.ui
559 ui = repo.ui
560 template = ui.config('ui', 'mergemarkertemplate')
560 template = ui.config('ui', 'mergemarkertemplate')
561 template = templater.unquotestring(template)
561 template = templater.unquotestring(template)
562 tmpl = formatter.maketemplater(ui, template)
562 tmpl = formatter.maketemplater(ui, template)
563
563
564 pad = max(len(l) for l in labels)
564 pad = max(len(l) for l in labels)
565
565
566 newlabels = [_formatconflictmarker(repo, cd, tmpl, labels[0], pad),
566 newlabels = [_formatconflictmarker(repo, cd, tmpl, labels[0], pad),
567 _formatconflictmarker(repo, co, tmpl, labels[1], pad)]
567 _formatconflictmarker(repo, co, tmpl, labels[1], pad)]
568 if len(labels) > 2:
568 if len(labels) > 2:
569 newlabels.append(_formatconflictmarker(repo, ca, tmpl, labels[2], pad))
569 newlabels.append(_formatconflictmarker(repo, ca, tmpl, labels[2], pad))
570 return newlabels
570 return newlabels
571
571
572 def partextras(labels):
572 def partextras(labels):
573 """Return a dictionary of extra labels for use in prompts to the user
573 """Return a dictionary of extra labels for use in prompts to the user
574
574
575 Intended use is in strings of the form "(l)ocal%(l)s".
575 Intended use is in strings of the form "(l)ocal%(l)s".
576 """
576 """
577 if labels is None:
577 if labels is None:
578 return {
578 return {
579 "l": "",
579 "l": "",
580 "o": "",
580 "o": "",
581 }
581 }
582
582
583 return {
583 return {
584 "l": " [%s]" % labels[0],
584 "l": " [%s]" % labels[0],
585 "o": " [%s]" % labels[1],
585 "o": " [%s]" % labels[1],
586 }
586 }
587
587
588 def _restorebackup(fcd, back):
588 def _restorebackup(fcd, back):
589 # TODO: Add a workingfilectx.write(otherfilectx) path so we can use
589 # TODO: Add a workingfilectx.write(otherfilectx) path so we can use
590 # util.copy here instead.
590 # util.copy here instead.
591 fcd.write(util.readfile(back), fcd.flags())
591 fcd.write(util.readfile(back), fcd.flags())
592
592
593 def _makebackup(repo, ui, fcd, premerge):
593 def _makebackup(repo, ui, fcd, premerge):
594 """Makes a backup of the local `fcd` file prior to merging.
594 """Makes a backup of the local `fcd` file prior to merging.
595
595
596 In addition to preserving the user's pre-existing modifications to `fcd`
596 In addition to preserving the user's pre-existing modifications to `fcd`
597 (if any), the backup is used to undo certain premerges, confirm whether a
597 (if any), the backup is used to undo certain premerges, confirm whether a
598 merge changed anything, and determine what line endings the new file should
598 merge changed anything, and determine what line endings the new file should
599 have.
599 have.
600 """
600 """
601 if fcd.isabsent():
601 if fcd.isabsent():
602 return None
602 return None
603
603
604 a = _workingpath(repo, fcd)
604 a = _workingpath(repo, fcd)
605 back = scmutil.origpath(ui, repo, a)
605 back = scmutil.origpath(ui, repo, a)
606 if premerge:
606 if premerge:
607 util.copyfile(a, back)
607 util.copyfile(a, back)
608 return back
608 return back
609
609
610 def _maketempfiles(repo, fco, fca):
610 def _maketempfiles(repo, fco, fca):
611 """Writes out `fco` and `fca` as temporary files, so an external merge
611 """Writes out `fco` and `fca` as temporary files, so an external merge
612 tool may use them.
612 tool may use them.
613 """
613 """
614 def temp(prefix, ctx):
614 def temp(prefix, ctx):
615 fullbase, ext = os.path.splitext(ctx.path())
615 fullbase, ext = os.path.splitext(ctx.path())
616 pre = "%s~%s." % (os.path.basename(fullbase), prefix)
616 pre = "%s~%s." % (os.path.basename(fullbase), prefix)
617 (fd, name) = tempfile.mkstemp(prefix=pre, suffix=ext)
617 (fd, name) = tempfile.mkstemp(prefix=pre, suffix=ext)
618 data = repo.wwritedata(ctx.path(), ctx.data())
618 data = repo.wwritedata(ctx.path(), ctx.data())
619 f = os.fdopen(fd, pycompat.sysstr("wb"))
619 f = os.fdopen(fd, pycompat.sysstr("wb"))
620 f.write(data)
620 f.write(data)
621 f.close()
621 f.close()
622 return name
622 return name
623
623
624 b = temp("base", fca)
624 b = temp("base", fca)
625 c = temp("other", fco)
625 c = temp("other", fco)
626
626
627 return b, c
627 return b, c
628
628
629 def _filemerge(premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
629 def _filemerge(premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
630 """perform a 3-way merge in the working directory
630 """perform a 3-way merge in the working directory
631
631
632 premerge = whether this is a premerge
632 premerge = whether this is a premerge
633 mynode = parent node before merge
633 mynode = parent node before merge
634 orig = original local filename before merge
634 orig = original local filename before merge
635 fco = other file context
635 fco = other file context
636 fca = ancestor file context
636 fca = ancestor file context
637 fcd = local file context for current/destination file
637 fcd = local file context for current/destination file
638
638
639 Returns whether the merge is complete, the return value of the merge, and
639 Returns whether the merge is complete, the return value of the merge, and
640 a boolean indicating whether the file was deleted from disk."""
640 a boolean indicating whether the file was deleted from disk."""
641
641
642 if not fco.cmp(fcd): # files identical?
642 if not fco.cmp(fcd): # files identical?
643 return True, None, False
643 return True, None, False
644
644
645 ui = repo.ui
645 ui = repo.ui
646 fd = fcd.path()
646 fd = fcd.path()
647 binary = fcd.isbinary() or fco.isbinary() or fca.isbinary()
647 binary = fcd.isbinary() or fco.isbinary() or fca.isbinary()
648 symlink = 'l' in fcd.flags() + fco.flags()
648 symlink = 'l' in fcd.flags() + fco.flags()
649 changedelete = fcd.isabsent() or fco.isabsent()
649 changedelete = fcd.isabsent() or fco.isabsent()
650 tool, toolpath = _picktool(repo, ui, fd, binary, symlink, changedelete)
650 tool, toolpath = _picktool(repo, ui, fd, binary, symlink, changedelete)
651 if tool in internals and tool.startswith('internal:'):
651 if tool in internals and tool.startswith('internal:'):
652 # normalize to new-style names (':merge' etc)
652 # normalize to new-style names (':merge' etc)
653 tool = tool[len('internal'):]
653 tool = tool[len('internal'):]
654 ui.debug("picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n"
654 ui.debug("picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n"
655 % (tool, fd, pycompat.bytestr(binary), pycompat.bytestr(symlink),
655 % (tool, fd, pycompat.bytestr(binary), pycompat.bytestr(symlink),
656 pycompat.bytestr(changedelete)))
656 pycompat.bytestr(changedelete)))
657
657
658 if tool in internals:
658 if tool in internals:
659 func = internals[tool]
659 func = internals[tool]
660 mergetype = func.mergetype
660 mergetype = func.mergetype
661 onfailure = func.onfailure
661 onfailure = func.onfailure
662 precheck = func.precheck
662 precheck = func.precheck
663 else:
663 else:
664 func = _xmerge
664 func = _xmerge
665 mergetype = fullmerge
665 mergetype = fullmerge
666 onfailure = _("merging %s failed!\n")
666 onfailure = _("merging %s failed!\n")
667 precheck = None
667 precheck = None
668
668
669 # If using deferred writes, must flush any deferred contents if running
669 # If using deferred writes, must flush any deferred contents if running
670 # an external merge tool since it has arbitrary access to the working
670 # an external merge tool since it has arbitrary access to the working
671 # copy.
671 # copy.
672 wctx.flushall()
672 wctx.flushall()
673
673
674 toolconf = tool, toolpath, binary, symlink
674 toolconf = tool, toolpath, binary, symlink
675
675
676 if mergetype == nomerge:
676 if mergetype == nomerge:
677 r, deleted = func(repo, mynode, orig, fcd, fco, fca, toolconf, labels)
677 r, deleted = func(repo, mynode, orig, fcd, fco, fca, toolconf, labels)
678 return True, r, deleted
678 return True, r, deleted
679
679
680 if premerge:
680 if premerge:
681 if orig != fco.path():
681 if orig != fco.path():
682 ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
682 ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
683 else:
683 else:
684 ui.status(_("merging %s\n") % fd)
684 ui.status(_("merging %s\n") % fd)
685
685
686 ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
686 ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
687
687
688 if precheck and not precheck(repo, mynode, orig, fcd, fco, fca,
688 if precheck and not precheck(repo, mynode, orig, fcd, fco, fca,
689 toolconf):
689 toolconf):
690 if onfailure:
690 if onfailure:
691 ui.warn(onfailure % fd)
691 ui.warn(onfailure % fd)
692 return True, 1, False
692 return True, 1, False
693
693
694 back = _makebackup(repo, ui, fcd, premerge)
694 back = _makebackup(repo, ui, fcd, premerge)
695 files = (None, None, None, back)
695 files = (None, None, None, back)
696 r = 1
696 r = 1
697 try:
697 try:
698 markerstyle = ui.config('ui', 'mergemarkers')
698 markerstyle = ui.config('ui', 'mergemarkers')
699 if not labels:
699 if not labels:
700 labels = _defaultconflictlabels
700 labels = _defaultconflictlabels
701 if markerstyle != 'basic':
701 if markerstyle != 'basic':
702 labels = _formatlabels(repo, fcd, fco, fca, labels)
702 labels = _formatlabels(repo, fcd, fco, fca, labels)
703
703
704 if premerge and mergetype == fullmerge:
704 if premerge and mergetype == fullmerge:
705 r = _premerge(repo, fcd, fco, fca, toolconf, files, labels=labels)
705 r = _premerge(repo, fcd, fco, fca, toolconf, files, labels=labels)
706 # complete if premerge successful (r is 0)
706 # complete if premerge successful (r is 0)
707 return not r, r, False
707 return not r, r, False
708
708
709 needcheck, r, deleted = func(repo, mynode, orig, fcd, fco, fca,
709 needcheck, r, deleted = func(repo, mynode, orig, fcd, fco, fca,
710 toolconf, files, labels=labels)
710 toolconf, files, labels=labels)
711
711
712 if needcheck:
712 if needcheck:
713 r = _check(repo, r, ui, tool, fcd, files)
713 r = _check(repo, r, ui, tool, fcd, files)
714
714
715 if r:
715 if r:
716 if onfailure:
716 if onfailure:
717 ui.warn(onfailure % fd)
717 ui.warn(onfailure % fd)
718
718
719 return True, r, deleted
719 return True, r, deleted
720 finally:
720 finally:
721 if not r and back is not None:
721 if not r and back is not None:
722 util.unlink(back)
722 util.unlink(back)
723
723
724 def _check(repo, r, ui, tool, fcd, files):
724 def _check(repo, r, ui, tool, fcd, files):
725 fd = fcd.path()
725 fd = fcd.path()
726 unused, unused, unused, back = files
726 unused, unused, unused, back = files
727
727
728 if not r and (_toolbool(ui, tool, "checkconflicts") or
728 if not r and (_toolbool(ui, tool, "checkconflicts") or
729 'conflicts' in _toollist(ui, tool, "check")):
729 'conflicts' in _toollist(ui, tool, "check")):
730 if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(),
730 if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(),
731 re.MULTILINE):
731 re.MULTILINE):
732 r = 1
732 r = 1
733
733
734 checked = False
734 checked = False
735 if 'prompt' in _toollist(ui, tool, "check"):
735 if 'prompt' in _toollist(ui, tool, "check"):
736 checked = True
736 checked = True
737 if ui.promptchoice(_("was merge of '%s' successful (yn)?"
737 if ui.promptchoice(_("was merge of '%s' successful (yn)?"
738 "$$ &Yes $$ &No") % fd, 1):
738 "$$ &Yes $$ &No") % fd, 1):
739 r = 1
739 r = 1
740
740
741 if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
741 if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
742 'changed' in
742 'changed' in
743 _toollist(ui, tool, "check")):
743 _toollist(ui, tool, "check")):
744 if back is not None and filecmp.cmp(_workingpath(repo, fcd), back):
744 if back is not None and filecmp.cmp(_workingpath(repo, fcd), back):
745 if ui.promptchoice(_(" output file %s appears unchanged\n"
745 if ui.promptchoice(_(" output file %s appears unchanged\n"
746 "was merge successful (yn)?"
746 "was merge successful (yn)?"
747 "$$ &Yes $$ &No") % fd, 1):
747 "$$ &Yes $$ &No") % fd, 1):
748 r = 1
748 r = 1
749
749
750 if back is not None and _toolbool(ui, tool, "fixeol"):
750 if back is not None and _toolbool(ui, tool, "fixeol"):
751 _matcheol(_workingpath(repo, fcd), back)
751 _matcheol(_workingpath(repo, fcd), back)
752
752
753 return r
753 return r
754
754
755 def _workingpath(repo, ctx):
755 def _workingpath(repo, ctx):
756 return repo.wjoin(ctx.path())
756 return repo.wjoin(ctx.path())
757
757
758 def premerge(repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
758 def premerge(repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
759 return _filemerge(True, repo, wctx, mynode, orig, fcd, fco, fca,
759 return _filemerge(True, repo, wctx, mynode, orig, fcd, fco, fca,
760 labels=labels)
760 labels=labels)
761
761
762 def filemerge(repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
762 def filemerge(repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
763 return _filemerge(False, repo, wctx, mynode, orig, fcd, fco, fca,
763 return _filemerge(False, repo, wctx, mynode, orig, fcd, fco, fca,
764 labels=labels)
764 labels=labels)
765
765
766 def loadinternalmerge(ui, extname, registrarobj):
766 def loadinternalmerge(ui, extname, registrarobj):
767 """Load internal merge tool from specified registrarobj
767 """Load internal merge tool from specified registrarobj
768 """
768 """
769 for name, func in registrarobj._table.iteritems():
769 for name, func in registrarobj._table.iteritems():
770 fullname = ':' + name
770 fullname = ':' + name
771 internals[fullname] = func
771 internals[fullname] = func
772 internals['internal:' + name] = func
772 internals['internal:' + name] = func
773 internalsdoc[fullname] = func
773 internalsdoc[fullname] = func
774
774
775 # load built-in merge tools explicitly to setup internalsdoc
775 # load built-in merge tools explicitly to setup internalsdoc
776 loadinternalmerge(None, None, internaltool)
776 loadinternalmerge(None, None, internaltool)
777
777
778 # tell hggettext to extract docstrings from these functions:
778 # tell hggettext to extract docstrings from these functions:
779 i18nfunctions = internals.values()
779 i18nfunctions = internals.values()
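For orientation, the long _picktool() function in the filemerge.py hunk above resolves which merge tool to run. Below is a condensed, self-contained sketch of the precedence it implements; the function and parameter names are hypothetical, and the capability checks, ui.merge handling, and the :prompt fallback for change/delete, binary, and symlink conflicts are omitted.

    def pick_tool_sketch(forcemerge, hgmerge_env, pattern_tools, configured_tools):
        """Return the merge tool name that the precedence order would select.

        forcemerge       -- value of ui.forcemerge (command line), or None
        hgmerge_env      -- value of the HGMERGE environment variable, or None
        pattern_tools    -- ordered tool names whose [merge-patterns] entry matched
        configured_tools -- list of (name, priority, disabled) from [merge-tools]
        """
        if forcemerge:                 # 1. the command-line override wins outright
            return forcemerge
        if hgmerge_env:                # 2. then the HGMERGE environment variable
            return hgmerge_env
        if pattern_tools:              # 3. then matching [merge-patterns] entries
            return pattern_tools[0]
        usable = [(prio, name) for name, prio, disabled in configured_tools
                  if not disabled]
        if usable:                     # 4. then [merge-tools], highest priority first
            return max(usable)[1]
        return ':merge'                # 5. internal merge as the last resort

    # No override, no HGMERGE, no pattern match: the highest-priority tool wins.
    print(pick_tool_sketch(None, None, [],
                           [('meld', 3, False), ('vimdiff', 7, False)]))  # vimdiff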
@@ -1,491 +1,491 b''
1 # mdiff.py - diff and patch routines for mercurial
1 # mdiff.py - diff and patch routines for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import re
10 import re
11 import struct
11 import struct
12 import zlib
12 import zlib
13
13
14 from .i18n import _
14 from .i18n import _
15 from . import (
15 from . import (
16 error,
16 error,
17 policy,
17 policy,
18 pycompat,
18 pycompat,
19 util,
19 util,
20 )
20 )
21
21
22 bdiff = policy.importmod(r'bdiff')
22 bdiff = policy.importmod(r'bdiff')
23 mpatch = policy.importmod(r'mpatch')
23 mpatch = policy.importmod(r'mpatch')
24
24
25 blocks = bdiff.blocks
25 blocks = bdiff.blocks
26 fixws = bdiff.fixws
26 fixws = bdiff.fixws
27 patches = mpatch.patches
27 patches = mpatch.patches
28 patchedsize = mpatch.patchedsize
28 patchedsize = mpatch.patchedsize
29 textdiff = bdiff.bdiff
29 textdiff = bdiff.bdiff
30
30
31 def splitnewlines(text):
31 def splitnewlines(text):
32 '''like str.splitlines, but only split on newlines.'''
32 '''like str.splitlines, but only split on newlines.'''
33 lines = [l + '\n' for l in text.split('\n')]
33 lines = [l + '\n' for l in text.split('\n')]
34 if lines:
34 if lines:
35 if lines[-1] == '\n':
35 if lines[-1] == '\n':
36 lines.pop()
36 lines.pop()
37 else:
37 else:
38 lines[-1] = lines[-1][:-1]
38 lines[-1] = lines[-1][:-1]
39 return lines
39 return lines
40
40
class diffopts(object):
    '''context is the number of context lines
    text treats all files as text
    showfunc enables diff -p output
    git enables the git extended patch format
    nodates removes dates from diff headers
    nobinary ignores binary files
    noprefix disables the 'a/' and 'b/' prefixes (ignored in plain mode)
    ignorews ignores all whitespace changes in the diff
    ignorewsamount ignores changes in the amount of whitespace
    ignoreblanklines ignores changes whose lines are all blank
    upgrade generates git diffs to avoid data loss
    '''

    defaults = {
        'context': 3,
        'text': False,
        'showfunc': False,
        'git': False,
        'nodates': False,
        'nobinary': False,
        'noprefix': False,
        'index': 0,
        'ignorews': False,
        'ignorewsamount': False,
        'ignorewseol': False,
        'ignoreblanklines': False,
        'upgrade': False,
        'showsimilarity': False,
    }

    def __init__(self, **opts):
        opts = pycompat.byteskwargs(opts)
        for k in self.defaults.keys():
            v = opts.get(k)
            if v is None:
                v = self.defaults[k]
            setattr(self, k, v)

        try:
            self.context = int(self.context)
        except ValueError:
            raise error.Abort(_('diff context lines count must be '
                                'an integer, not %r') % self.context)

    def copy(self, **kwargs):
        opts = dict((k, getattr(self, k)) for k in self.defaults)
        opts = pycompat.strkwargs(opts)
        opts.update(kwargs)
        return diffopts(**opts)

defaultopts = diffopts()

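# Illustrative sketch (hypothetical helper, assumes the Python 2-era
# byte-string keyword handling used by this module): unknown defaults fall
# back to the class table, and copy() rebuilds a diffopts with overrides.
def _example_diffopts():
    opts = diffopts(git=True, context=5)
    assert opts.git and opts.context == 5
    nodateopts = opts.copy(nodates=True)   # tweak one knob, keep the rest
    assert nodateopts.git and nodateopts.nodates
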
def wsclean(opts, text, blank=True):
    if opts.ignorews:
        text = bdiff.fixws(text, 1)
    elif opts.ignorewsamount:
        text = bdiff.fixws(text, 0)
    if blank and opts.ignoreblanklines:
        text = re.sub('\n+', '\n', text).strip('\n')
    if opts.ignorewseol:
        text = re.sub(r'[ \t\r\f]+\n', r'\n', text)
    return text

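# Illustrative sketch (hypothetical helper): the pure-Python branches of
# wsclean() can be exercised directly; the bdiff.fixws branches are left out
# here because they depend on the C extension.
def _example_wsclean():
    assert wsclean(diffopts(ignoreblanklines=True), 'a\n\n\nb\n') == 'a\nb'
    assert wsclean(diffopts(ignorewseol=True), 'a  \nb\t\n') == 'a\nb\n'
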
def splitblock(base1, lines1, base2, lines2, opts):
    # The input lines match except for interwoven blank lines. We
    # transform them into a sequence of matching blocks and blank blocks.
    lines1 = [(wsclean(opts, l) and 1 or 0) for l in lines1]
    lines2 = [(wsclean(opts, l) and 1 or 0) for l in lines2]
    s1, e1 = 0, len(lines1)
    s2, e2 = 0, len(lines2)
    while s1 < e1 or s2 < e2:
        i1, i2, btype = s1, s2, '='
        if (i1 >= e1 or lines1[i1] == 0
            or i2 >= e2 or lines2[i2] == 0):
            # Consume the block of blank lines
            btype = '~'
            while i1 < e1 and lines1[i1] == 0:
                i1 += 1
            while i2 < e2 and lines2[i2] == 0:
                i2 += 1
        else:
            # Consume the matching lines
            while i1 < e1 and lines1[i1] == 1 and lines2[i2] == 1:
                i1 += 1
                i2 += 1
        yield [base1 + s1, base1 + i1, base2 + s2, base2 + i2], btype
        s1 = i1
        s2 = i2

def hunkinrange(hunk, linerange):
    """Return True if `hunk` defined as (start, length) is in `linerange`
    defined as (lowerbound, upperbound).

    >>> hunkinrange((5, 10), (2, 7))
    True
    >>> hunkinrange((5, 10), (6, 12))
    True
    >>> hunkinrange((5, 10), (13, 17))
    True
    >>> hunkinrange((5, 10), (3, 17))
    True
    >>> hunkinrange((5, 10), (1, 3))
    False
    >>> hunkinrange((5, 10), (18, 20))
    False
    >>> hunkinrange((5, 10), (1, 5))
    False
    >>> hunkinrange((5, 10), (15, 27))
    False
    """
    start, length = hunk
    lowerbound, upperbound = linerange
    return lowerbound < start + length and start < upperbound

def blocksinrange(blocks, rangeb):
    """filter `blocks` like (a1, a2, b1, b2) from items outside line range
    `rangeb` from ``(b1, b2)`` point of view.

    Return `filteredblocks, rangea` where:

    * `filteredblocks` is a list of ``block = (a1, a2, b1, b2), stype`` items
      of `blocks` that are inside `rangeb` from ``(b1, b2)`` point of view; a
      block ``(b1, b2)`` being inside `rangeb` if
      ``rangeb[0] < b2 and b1 < rangeb[1]``;
    * `rangea` is the line range w.r.t. the ``(a1, a2)`` parts of `blocks`.
    """
    lbb, ubb = rangeb
    lba, uba = None, None
    filteredblocks = []
    for block in blocks:
        (a1, a2, b1, b2), stype = block
        if lbb >= b1 and ubb <= b2 and stype == '=':
            # rangeb is within a single "=" hunk, restrict back linerange1
            # by offsetting rangeb
            lba = lbb - b1 + a1
            uba = ubb - b1 + a1
        else:
            if b1 <= lbb < b2:
                if stype == '=':
                    lba = a2 - (b2 - lbb)
                else:
                    lba = a1
            if b1 < ubb <= b2:
                if stype == '=':
                    uba = a1 + (ubb - b1)
                else:
                    uba = a2
        if hunkinrange((b1, (b2 - b1)), rangeb):
            filteredblocks.append(block)
    if lba is None or uba is None or uba < lba:
        raise error.Abort(_('line range exceeds file size'))
    return filteredblocks, (lba, uba)

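# Illustrative sketch (hypothetical helper): with a single fully matching
# block, a line range on the "b" side maps straight back onto the same range
# on the "a" side.
def _example_blocksinrange():
    blocks = [((0, 3, 0, 3), '=')]
    filtered, rangea = blocksinrange(blocks, (1, 2))
    assert filtered == blocks
    assert rangea == (1, 2)
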
def allblocks(text1, text2, opts=None, lines1=None, lines2=None):
    """Return (block, type) tuples, where block is an mdiff.blocks
    line entry. type is '=' for blocks matching exactly one another
    (bdiff blocks), '!' for non-matching blocks and '~' for blocks
    matching only after having filtered blank lines.
    lines1 and lines2 are text1 and text2 split with splitnewlines(),
    if they are already available.
    """
    if opts is None:
        opts = defaultopts
    if opts.ignorews or opts.ignorewsamount or opts.ignorewseol:
        text1 = wsclean(opts, text1, False)
        text2 = wsclean(opts, text2, False)
    diff = bdiff.blocks(text1, text2)
    for i, s1 in enumerate(diff):
        # The first match is special.
        # we've either found a match starting at line 0 or a match later
        # in the file. If it starts later, old and new below will both be
        # empty and we'll continue to the next match.
        if i > 0:
            s = diff[i - 1]
        else:
            s = [0, 0, 0, 0]
        s = [s[1], s1[0], s[3], s1[2]]

        # bdiff sometimes gives huge matches past eof, this check eats them,
        # and deals with the special first match case described above
        if s[0] != s[1] or s[2] != s[3]:
            type = '!'
            if opts.ignoreblanklines:
                if lines1 is None:
                    lines1 = splitnewlines(text1)
                if lines2 is None:
                    lines2 = splitnewlines(text2)
                old = wsclean(opts, "".join(lines1[s[0]:s[1]]))
                new = wsclean(opts, "".join(lines2[s[2]:s[3]]))
                if old == new:
                    type = '~'
            yield s, type
        yield s1, '='

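# Illustrative sketch (hypothetical helper; assumes Mercurial's bdiff
# extension is importable, as it is for the rest of this module). The changed
# middle line shows up as a '!' block sandwiched between '=' blocks.
def _example_allblocks():
    t1 = 'a\nb\nc\n'
    t2 = 'a\nx\nc\n'
    for block, btype in allblocks(t1, t2):
        print(block, btype)
    # roughly: ([1, 2, 1, 2], '!') for the changed line, with '=' entries
    # for the matching regions around it
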
def unidiff(a, ad, b, bd, fn1, fn2, opts=defaultopts):
    """Return a unified diff as a (headers, hunks) tuple.

    If the diff is not null, `headers` is a list with unified diff header
    lines "--- <original>" and "+++ <new>" and `hunks` is a generator yielding
    (hunkrange, hunklines) coming from _unidiff().
    Otherwise, `headers` and `hunks` are empty.
    """
    def datetag(date, fn=None):
        if not opts.git and not opts.nodates:
            return '\t%s' % date
        if fn and ' ' in fn:
            return '\t'
        return ''

    sentinel = [], ()
    if not a and not b:
        return sentinel

    if opts.noprefix:
        aprefix = bprefix = ''
    else:
        aprefix = 'a/'
        bprefix = 'b/'

    epoch = util.datestr((0, 0))

    fn1 = util.pconvert(fn1)
    fn2 = util.pconvert(fn2)

    def checknonewline(lines):
        for text in lines:
            if text[-1:] != '\n':
                text += "\n\ No newline at end of file\n"
            yield text

    if not opts.text and (util.binary(a) or util.binary(b)):
        if a and b and len(a) == len(b) and a == b:
            return sentinel
        headerlines = []
        hunks = (None, ['Binary file %s has changed\n' % fn1]),
    elif not a:
        b = splitnewlines(b)
        if a is None:
            l1 = '--- /dev/null%s' % datetag(epoch)
        else:
            l1 = "--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1))
        l2 = "+++ %s%s" % (bprefix + fn2, datetag(bd, fn2))
        headerlines = [l1, l2]
        size = len(b)
        hunkrange = (0, 0, 1, size)
        hunklines = ["@@ -0,0 +1,%d @@\n" % size] + ["+" + e for e in b]
        hunks = (hunkrange, checknonewline(hunklines)),
    elif not b:
        a = splitnewlines(a)
        l1 = "--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1))
        if b is None:
            l2 = '+++ /dev/null%s' % datetag(epoch)
        else:
            l2 = "+++ %s%s%s" % (bprefix, fn2, datetag(bd, fn2))
        headerlines = [l1, l2]
        size = len(a)
        hunkrange = (1, size, 0, 0)
        hunklines = ["@@ -1,%d +0,0 @@\n" % size] + ["-" + e for e in a]
        hunks = (hunkrange, checknonewline(hunklines)),
    else:
        diffhunks = _unidiff(a, b, opts=opts)
        try:
            hunkrange, hunklines = next(diffhunks)
        except StopIteration:
            return sentinel

        headerlines = [
            "--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1)),
            "+++ %s%s%s" % (bprefix, fn2, datetag(bd, fn2)),
        ]
        def rewindhunks():
            yield hunkrange, checknonewline(hunklines)
            for hr, hl in diffhunks:
                yield hr, checknonewline(hl)

        hunks = rewindhunks()

    return headerlines, hunks

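# Illustrative sketch (hypothetical helper; assumes the util/bdiff helpers
# used above are available). With nodates=True the headers carry no date tag,
# and a single changed line in a three-line file yields one hunk covering the
# whole file at the default context of 3 lines.
def _example_unidiff():
    old = 'one\ntwo\nthree\n'
    new = 'one\n2\nthree\n'
    headers, hunks = unidiff(old, '', new, '', 'f.txt', 'f.txt',
                             opts=diffopts(nodates=True))
    for line in headers:
        print(line)                  # --- a/f.txt  /  +++ b/f.txt
    for hunkrange, hunklines in hunks:
        print(hunkrange)             # (1, 3, 1, 3)
        print(''.join(hunklines))
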
def _unidiff(t1, t2, opts=defaultopts):
    """Yield hunks of a headerless unified diff from t1 and t2 texts.

    Each hunk consists of a (hunkrange, hunklines) tuple where `hunkrange` is a
    tuple (s1, l1, s2, l2) representing the range information of the hunk to
    form the '@@ -s1,l1 +s2,l2 @@' header and `hunklines` is a list of lines
    of the hunk combining said header followed by line additions and
    deletions.
    """
    l1 = splitnewlines(t1)
    l2 = splitnewlines(t2)
    def contextend(l, len):
        ret = l + opts.context
        if ret > len:
            ret = len
        return ret

    def contextstart(l):
        ret = l - opts.context
        if ret < 0:
            return 0
        return ret

    lastfunc = [0, '']
    def yieldhunk(hunk):
        (astart, a2, bstart, b2, delta) = hunk
        aend = contextend(a2, len(l1))
        alen = aend - astart
        blen = b2 - bstart + aend - a2

        func = ""
        if opts.showfunc:
            lastpos, func = lastfunc
            # walk backwards from the start of the context up to the start of
            # the previous hunk context until we find a line starting with an
            # alphanumeric char.
            for i in xrange(astart - 1, lastpos - 1, -1):
                if l1[i][0].isalnum():
                    func = ' ' + l1[i].rstrip()[:40]
                    lastfunc[1] = func
                    break
            # by recording this hunk's starting point as the next place to
            # start looking for function lines, we avoid reading any line in
            # the file more than once.
            lastfunc[0] = astart

        # zero-length hunk ranges report their start line as one less
        if alen:
            astart += 1
        if blen:
            bstart += 1

        hunkrange = astart, alen, bstart, blen
        hunklines = (
            ["@@ -%d,%d +%d,%d @@%s\n" % (hunkrange + (func,))]
            + delta
            + [' ' + l1[x] for x in xrange(a2, aend)]
        )
        yield hunkrange, hunklines

    # bdiff.blocks gives us the matching sequences in the files. The loop
    # below finds the spaces between those matching sequences and translates
    # them into diff output.
    #
    hunk = None
    ignoredlines = 0
    for s, stype in allblocks(t1, t2, opts, l1, l2):
        a1, a2, b1, b2 = s
        if stype != '!':
            if stype == '~':
                # The diff context lines are based on t1 content. When
                # blank lines are ignored, the line offsets on the new
                # side must be adjusted as if equivalent blocks ('~') had
                # the same sizes on both sides.
                ignoredlines += (b2 - b1) - (a2 - a1)
            continue
        delta = []
        old = l1[a1:a2]
        new = l2[b1:b2]

        b1 -= ignoredlines
        b2 -= ignoredlines
        astart = contextstart(a1)
        bstart = contextstart(b1)
        prev = None
        if hunk:
            # join with the previous hunk if it falls inside the context
            if astart < hunk[1] + opts.context + 1:
                prev = hunk
                astart = hunk[1]
                bstart = hunk[3]
            else:
                for x in yieldhunk(hunk):
                    yield x
        if prev:
            # we've joined the previous hunk, record the new ending points.
            hunk[1] = a2
            hunk[3] = b2
            delta = hunk[4]
        else:
            # create a new hunk
            hunk = [astart, a2, bstart, b2, delta]

        delta[len(delta):] = [' ' + x for x in l1[astart:a1]]
        delta[len(delta):] = ['-' + x for x in old]
        delta[len(delta):] = ['+' + x for x in new]

    if hunk:
        for x in yieldhunk(hunk):
            yield x

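# Illustrative sketch (hypothetical helper): two edits separated by fewer
# unchanged lines than the surrounding context allows are merged into a
# single hunk (counts assume the default context of 3 lines).
def _example_unidiff_hunk_joining():
    lines = ['line %d\n' % n for n in range(20)]
    changed = list(lines)
    changed[3] = 'three\n'
    changed[6] = 'six\n'
    hunkranges = [hr for hr, hl in _unidiff(''.join(lines), ''.join(changed))]
    assert len(hunkranges) == 1      # both edits land in one hunk
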
def b85diff(to, tn):
    '''print base85-encoded binary diff'''
    def fmtline(line):
        l = len(line)
        if l <= 26:
            l = chr(ord('A') + l - 1)
        else:
            l = chr(l - 26 + ord('a') - 1)
        return '%c%s\n' % (l, util.b85encode(line, True))

    def chunk(text, csize=52):
        l = len(text)
        i = 0
        while i < l:
            yield text[i:i + csize]
            i += csize

    if to is None:
        to = ''
    if tn is None:
        tn = ''

    if to == tn:
        return ''

    # TODO: deltas
    ret = []
    ret.append('GIT binary patch\n')
    ret.append('literal %d\n' % len(tn))  # this commit: '%d' instead of '%s' for the integer length
    for l in chunk(zlib.compress(tn)):
        ret.append(fmtline(l))
    ret.append('\n')

    return ''.join(ret)

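# Illustrative sketch (hypothetical helper; assumes Python 2-style byte
# strings and util.b85encode, as used above). The 'literal' header carries
# the uncompressed length of the new content.
def _example_b85diff():
    out = b85diff(None, 'hello world')
    assert out.startswith('GIT binary patch\nliteral 11\n')
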
def patchtext(bin):
    pos = 0
    t = []
    while pos < len(bin):
        p1, p2, l = struct.unpack(">lll", bin[pos:pos + 12])
        pos += 12
        t.append(bin[pos:pos + l])
        pos += l
    return "".join(t)

def patch(a, bin):
    if len(a) == 0:
        # skip over trivial delta header
        return util.buffer(bin, 12)
    return mpatch.patches(a, [bin])

# similar to difflib.SequenceMatcher.get_matching_blocks
def get_matching_blocks(a, b):
    return [(d[0], d[2], d[1] - d[0]) for d in bdiff.blocks(a, b)]

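# Illustrative sketch (hypothetical helper; assumes the bdiff extension is
# importable): the result has the same shape as difflib's matching blocks,
# i.e. (start_a, start_b, length) triples.
def _example_get_matching_blocks():
    for a_start, b_start, length in get_matching_blocks('a\nb\n', 'a\nc\n'):
        print(a_start, b_start, length)
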
def trivialdiffheader(length):
    return struct.pack(">lll", 0, 0, length) if length else ''

def replacediffheader(oldlen, newlen):
    return struct.pack(">lll", 0, oldlen, newlen)
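
# Illustrative sketch (hypothetical helper; assumes the bdiff/mpatch C
# modules behave as in Mercurial): a delta produced by textdiff() applies
# back with patch(), and a "trivial" header simply prepends the full text.
def _example_delta_roundtrip():
    old = 'line 1\nline 2\n'
    new = 'line 1\nline two\n'
    assert patch(old, textdiff(old, new)) == new
    text = 'hello\n'
    assert patchtext(trivialdiffheader(len(text)) + text) == text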