@@ -1,1274 +1,1270 b''
1 | # filemerge.py - file-level merge handling for Mercurial |
|
1 | # filemerge.py - file-level merge handling for Mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2006, 2007, 2008 Olivia Mackall <olivia@selenic.com> |
|
3 | # Copyright 2006, 2007, 2008 Olivia Mackall <olivia@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | import contextlib |
|
10 | import contextlib | |
11 | import os |
|
11 | import os | |
12 | import re |
|
12 | import re | |
13 | import shutil |
|
13 | import shutil | |
14 |
|
14 | |||
15 | from .i18n import _ |
|
15 | from .i18n import _ | |
16 | from .node import ( |
|
16 | from .node import ( | |
17 | hex, |
|
17 | hex, | |
18 | short, |
|
18 | short, | |
19 | ) |
|
19 | ) | |
20 | from .pycompat import ( |
|
20 | from .pycompat import ( | |
21 | getattr, |
|
21 | getattr, | |
22 | open, |
|
22 | open, | |
23 | ) |
|
23 | ) | |
24 |
|
24 | |||
25 | from . import ( |
|
25 | from . import ( | |
26 | encoding, |
|
26 | encoding, | |
27 | error, |
|
27 | error, | |
28 | formatter, |
|
28 | formatter, | |
29 | match, |
|
29 | match, | |
30 | pycompat, |
|
30 | pycompat, | |
31 | registrar, |
|
31 | registrar, | |
32 | scmutil, |
|
32 | scmutil, | |
33 | simplemerge, |
|
33 | simplemerge, | |
34 | tagmerge, |
|
34 | tagmerge, | |
35 | templatekw, |
|
35 | templatekw, | |
36 | templater, |
|
36 | templater, | |
37 | templateutil, |
|
37 | templateutil, | |
38 | util, |
|
38 | util, | |
39 | ) |
|
39 | ) | |
40 |
|
40 | |||
41 | from .utils import ( |
|
41 | from .utils import ( | |
42 | procutil, |
|
42 | procutil, | |
43 | stringutil, |
|
43 | stringutil, | |
44 | ) |
|
44 | ) | |
45 |
|
45 | |||
46 |
|
46 | |||
47 | def _toolstr(ui, tool, part, *args): |
|
47 | def _toolstr(ui, tool, part, *args): | |
48 | return ui.config(b"merge-tools", tool + b"." + part, *args) |
|
48 | return ui.config(b"merge-tools", tool + b"." + part, *args) | |
49 |
|
49 | |||
50 |
|
50 | |||
51 | def _toolbool(ui, tool, part, *args): |
|
51 | def _toolbool(ui, tool, part, *args): | |
52 | return ui.configbool(b"merge-tools", tool + b"." + part, *args) |
|
52 | return ui.configbool(b"merge-tools", tool + b"." + part, *args) | |
53 |
|
53 | |||
54 |
|
54 | |||
55 | def _toollist(ui, tool, part): |
|
55 | def _toollist(ui, tool, part): | |
56 | return ui.configlist(b"merge-tools", tool + b"." + part) |
|
56 | return ui.configlist(b"merge-tools", tool + b"." + part) | |
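# Illustrative example (assumed tool name and values, not from this file):
# the three helpers above read per-tool keys of the form "<tool>.<part>"
# from the [merge-tools] config section, e.g.
#
#     [merge-tools]
#     kdiff3.executable = /usr/bin/kdiff3
#     kdiff3.args = $base $local $other -o $output
#     kdiff3.priority = 1
#     kdiff3.premerge = keep-merge3
#
# so _toolstr(ui, b'kdiff3', b'args', b'') returns the args template and
# _toolbool(ui, b'kdiff3', b'gui') reports whether the tool needs a GUI.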
57 |
|
57 | |||
58 |
|
58 | |||
59 | internals = {} |
|
59 | internals = {} | |
60 | # Merge tools to document. |
|
60 | # Merge tools to document. | |
61 | internalsdoc = {} |
|
61 | internalsdoc = {} | |
62 |
|
62 | |||
63 | internaltool = registrar.internalmerge() |
|
63 | internaltool = registrar.internalmerge() | |
64 |
|
64 | |||
65 | # internal tool merge types |
|
65 | # internal tool merge types | |
66 | nomerge = internaltool.nomerge |
|
66 | nomerge = internaltool.nomerge | |
67 | mergeonly = internaltool.mergeonly # just the full merge, no premerge |
|
67 | mergeonly = internaltool.mergeonly # just the full merge, no premerge | |
68 | fullmerge = internaltool.fullmerge # both premerge and merge |
|
68 | fullmerge = internaltool.fullmerge # both premerge and merge | |
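# In practice: "nomerge" tools (:local, :other, :fail and :prompt below)
# never attempt a content merge, which is why they are the only tools able
# to resolve change/delete conflicts; "mergeonly" tools skip the premerge
# pass; "fullmerge" tools run premerge first and only fall back to the real
# merge when conflicts remain.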
69 |
|
69 | |||
70 | # IMPORTANT: keep the last line of this prompt very short ("What do you want to |
|
70 | # IMPORTANT: keep the last line of this prompt very short ("What do you want to | |
71 | # do?") because of issue6158, ideally to <40 English characters (to allow other |
|
71 | # do?") because of issue6158, ideally to <40 English characters (to allow other | |
72 | # languages that may take more columns to still have a chance to fit in an |
|
72 | # languages that may take more columns to still have a chance to fit in an | |
73 | # 80-column screen). |
|
73 | # 80-column screen). | |
74 | _localchangedotherdeletedmsg = _( |
|
74 | _localchangedotherdeletedmsg = _( | |
75 | b"file '%(fd)s' was deleted in other%(o)s but was modified in local%(l)s.\n" |
|
75 | b"file '%(fd)s' was deleted in other%(o)s but was modified in local%(l)s.\n" | |
76 | b"You can use (c)hanged version, (d)elete, or leave (u)nresolved.\n" |
|
76 | b"You can use (c)hanged version, (d)elete, or leave (u)nresolved.\n" | |
77 | b"What do you want to do?" |
|
77 | b"What do you want to do?" | |
78 | b"$$ &Changed $$ &Delete $$ &Unresolved" |
|
78 | b"$$ &Changed $$ &Delete $$ &Unresolved" | |
79 | ) |
|
79 | ) | |
80 |
|
80 | |||
81 | _otherchangedlocaldeletedmsg = _( |
|
81 | _otherchangedlocaldeletedmsg = _( | |
82 | b"file '%(fd)s' was deleted in local%(l)s but was modified in other%(o)s.\n" |
|
82 | b"file '%(fd)s' was deleted in local%(l)s but was modified in other%(o)s.\n" | |
83 | b"You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.\n" |
|
83 | b"You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.\n" | |
84 | b"What do you want to do?" |
|
84 | b"What do you want to do?" | |
85 | b"$$ &Changed $$ &Deleted $$ &Unresolved" |
|
85 | b"$$ &Changed $$ &Deleted $$ &Unresolved" | |
86 | ) |
|
86 | ) | |
87 |
|
87 | |||
88 |
|
88 | |||
89 | class absentfilectx(object): |
|
89 | class absentfilectx(object): | |
90 | """Represents a file that's ostensibly in a context but is actually not |
|
90 | """Represents a file that's ostensibly in a context but is actually not | |
91 | present in it. |
|
91 | present in it. | |
92 |
|
92 | |||
93 | This is here because it's very specific to the filemerge code for now -- |
|
93 | This is here because it's very specific to the filemerge code for now -- | |
94 | other code is likely going to break with the values this returns.""" |
|
94 | other code is likely going to break with the values this returns.""" | |
95 |
|
95 | |||
96 | def __init__(self, ctx, f): |
|
96 | def __init__(self, ctx, f): | |
97 | self._ctx = ctx |
|
97 | self._ctx = ctx | |
98 | self._f = f |
|
98 | self._f = f | |
99 |
|
99 | |||
100 | def __bytes__(self): |
|
100 | def __bytes__(self): | |
101 | return b'absent file %s@%s' % (self._f, self._ctx) |
|
101 | return b'absent file %s@%s' % (self._f, self._ctx) | |
102 |
|
102 | |||
103 | def path(self): |
|
103 | def path(self): | |
104 | return self._f |
|
104 | return self._f | |
105 |
|
105 | |||
106 | def size(self): |
|
106 | def size(self): | |
107 | return None |
|
107 | return None | |
108 |
|
108 | |||
109 | def data(self): |
|
109 | def data(self): | |
110 | return None |
|
110 | return None | |
111 |
|
111 | |||
112 | def filenode(self): |
|
112 | def filenode(self): | |
113 | return self._ctx.repo().nullid |
|
113 | return self._ctx.repo().nullid | |
114 |
|
114 | |||
115 | _customcmp = True |
|
115 | _customcmp = True | |
116 |
|
116 | |||
117 | def cmp(self, fctx): |
|
117 | def cmp(self, fctx): | |
118 | """compare with other file context |
|
118 | """compare with other file context | |
119 |
|
119 | |||
120 | returns True if different from fctx. |
|
120 | returns True if different from fctx. | |
121 | """ |
|
121 | """ | |
122 | return not ( |
|
122 | return not ( | |
123 | fctx.isabsent() |
|
123 | fctx.isabsent() | |
124 | and fctx.changectx() == self.changectx() |
|
124 | and fctx.changectx() == self.changectx() | |
125 | and fctx.path() == self.path() |
|
125 | and fctx.path() == self.path() | |
126 | ) |
|
126 | ) | |
127 |
|
127 | |||
128 | def flags(self): |
|
128 | def flags(self): | |
129 | return b'' |
|
129 | return b'' | |
130 |
|
130 | |||
131 | def changectx(self): |
|
131 | def changectx(self): | |
132 | return self._ctx |
|
132 | return self._ctx | |
133 |
|
133 | |||
134 | def isbinary(self): |
|
134 | def isbinary(self): | |
135 | return False |
|
135 | return False | |
136 |
|
136 | |||
137 | def isabsent(self): |
|
137 | def isabsent(self): | |
138 | return True |
|
138 | return True | |
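# Rough usage sketch (hypothetical path): in a change/delete conflict the
# deleted side is represented by this placeholder, e.g.
#     fco = absentfilectx(ctx, b'foo.txt')
#     fco.isabsent()   # True
#     fco.data()       # None, there is nothing to merge on that side
# and cmp() above only reports "unchanged" when compared with another
# absent file for the same path and changectx.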
139 |
|
139 | |||
140 |
|
140 | |||
141 | def _findtool(ui, tool): |
|
141 | def _findtool(ui, tool): | |
142 | if tool in internals: |
|
142 | if tool in internals: | |
143 | return tool |
|
143 | return tool | |
144 | cmd = _toolstr(ui, tool, b"executable", tool) |
|
144 | cmd = _toolstr(ui, tool, b"executable", tool) | |
145 | if cmd.startswith(b'python:'): |
|
145 | if cmd.startswith(b'python:'): | |
146 | return cmd |
|
146 | return cmd | |
147 | return findexternaltool(ui, tool) |
|
147 | return findexternaltool(ui, tool) | |
148 |
|
148 | |||
149 |
|
149 | |||
150 | def _quotetoolpath(cmd): |
|
150 | def _quotetoolpath(cmd): | |
151 | if cmd.startswith(b'python:'): |
|
151 | if cmd.startswith(b'python:'): | |
152 | return cmd |
|
152 | return cmd | |
153 | return procutil.shellquote(cmd) |
|
153 | return procutil.shellquote(cmd) | |
154 |
|
154 | |||
155 |
|
155 | |||
156 | def findexternaltool(ui, tool): |
|
156 | def findexternaltool(ui, tool): | |
157 | for kn in (b"regkey", b"regkeyalt"): |
|
157 | for kn in (b"regkey", b"regkeyalt"): | |
158 | k = _toolstr(ui, tool, kn) |
|
158 | k = _toolstr(ui, tool, kn) | |
159 | if not k: |
|
159 | if not k: | |
160 | continue |
|
160 | continue | |
161 | p = util.lookupreg(k, _toolstr(ui, tool, b"regname")) |
|
161 | p = util.lookupreg(k, _toolstr(ui, tool, b"regname")) | |
162 | if p: |
|
162 | if p: | |
163 | p = procutil.findexe(p + _toolstr(ui, tool, b"regappend", b"")) |
|
163 | p = procutil.findexe(p + _toolstr(ui, tool, b"regappend", b"")) | |
164 | if p: |
|
164 | if p: | |
165 | return p |
|
165 | return p | |
166 | exe = _toolstr(ui, tool, b"executable", tool) |
|
166 | exe = _toolstr(ui, tool, b"executable", tool) | |
167 | return procutil.findexe(util.expandpath(exe)) |
|
167 | return procutil.findexe(util.expandpath(exe)) | |
168 |
|
168 | |||
169 |
|
169 | |||
170 | def _picktool(repo, ui, path, binary, symlink, changedelete): |
|
170 | def _picktool(repo, ui, path, binary, symlink, changedelete): | |
171 | strictcheck = ui.configbool(b'merge', b'strict-capability-check') |
|
171 | strictcheck = ui.configbool(b'merge', b'strict-capability-check') | |
172 |
|
172 | |||
173 | def hascapability(tool, capability, strict=False): |
|
173 | def hascapability(tool, capability, strict=False): | |
174 | if tool in internals: |
|
174 | if tool in internals: | |
175 | return strict and internals[tool].capabilities.get(capability) |
|
175 | return strict and internals[tool].capabilities.get(capability) | |
176 | return _toolbool(ui, tool, capability) |
|
176 | return _toolbool(ui, tool, capability) | |
177 |
|
177 | |||
178 | def supportscd(tool): |
|
178 | def supportscd(tool): | |
179 | return tool in internals and internals[tool].mergetype == nomerge |
|
179 | return tool in internals and internals[tool].mergetype == nomerge | |
180 |
|
180 | |||
181 | def check(tool, pat, symlink, binary, changedelete): |
|
181 | def check(tool, pat, symlink, binary, changedelete): | |
182 | tmsg = tool |
|
182 | tmsg = tool | |
183 | if pat: |
|
183 | if pat: | |
184 | tmsg = _(b"%s (for pattern %s)") % (tool, pat) |
|
184 | tmsg = _(b"%s (for pattern %s)") % (tool, pat) | |
185 | if not _findtool(ui, tool): |
|
185 | if not _findtool(ui, tool): | |
186 | if pat: # explicitly requested tool deserves a warning |
|
186 | if pat: # explicitly requested tool deserves a warning | |
187 | ui.warn(_(b"couldn't find merge tool %s\n") % tmsg) |
|
187 | ui.warn(_(b"couldn't find merge tool %s\n") % tmsg) | |
188 | else: # configured but non-existing tools are more silent |
|
188 | else: # configured but non-existing tools are more silent | |
189 | ui.note(_(b"couldn't find merge tool %s\n") % tmsg) |
|
189 | ui.note(_(b"couldn't find merge tool %s\n") % tmsg) | |
190 | elif symlink and not hascapability(tool, b"symlink", strictcheck): |
|
190 | elif symlink and not hascapability(tool, b"symlink", strictcheck): | |
191 | ui.warn(_(b"tool %s can't handle symlinks\n") % tmsg) |
|
191 | ui.warn(_(b"tool %s can't handle symlinks\n") % tmsg) | |
192 | elif binary and not hascapability(tool, b"binary", strictcheck): |
|
192 | elif binary and not hascapability(tool, b"binary", strictcheck): | |
193 | ui.warn(_(b"tool %s can't handle binary\n") % tmsg) |
|
193 | ui.warn(_(b"tool %s can't handle binary\n") % tmsg) | |
194 | elif changedelete and not supportscd(tool): |
|
194 | elif changedelete and not supportscd(tool): | |
195 | # the nomerge tools are the only tools that support change/delete |
|
195 | # the nomerge tools are the only tools that support change/delete | |
196 | # conflicts |
|
196 | # conflicts | |
197 | pass |
|
197 | pass | |
198 | elif not procutil.gui() and _toolbool(ui, tool, b"gui"): |
|
198 | elif not procutil.gui() and _toolbool(ui, tool, b"gui"): | |
199 | ui.warn(_(b"tool %s requires a GUI\n") % tmsg) |
|
199 | ui.warn(_(b"tool %s requires a GUI\n") % tmsg) | |
200 | else: |
|
200 | else: | |
201 | return True |
|
201 | return True | |
202 | return False |
|
202 | return False | |
203 |
|
203 | |||
204 | # internal config: ui.forcemerge |
|
204 | # internal config: ui.forcemerge | |
205 | # forcemerge comes from command line arguments, highest priority |
|
205 | # forcemerge comes from command line arguments, highest priority | |
206 | force = ui.config(b'ui', b'forcemerge') |
|
206 | force = ui.config(b'ui', b'forcemerge') | |
207 | if force: |
|
207 | if force: | |
208 | toolpath = _findtool(ui, force) |
|
208 | toolpath = _findtool(ui, force) | |
209 | if changedelete and not supportscd(toolpath): |
|
209 | if changedelete and not supportscd(toolpath): | |
210 | return b":prompt", None |
|
210 | return b":prompt", None | |
211 | else: |
|
211 | else: | |
212 | if toolpath: |
|
212 | if toolpath: | |
213 | return (force, _quotetoolpath(toolpath)) |
|
213 | return (force, _quotetoolpath(toolpath)) | |
214 | else: |
|
214 | else: | |
215 | # mimic HGMERGE if given tool not found |
|
215 | # mimic HGMERGE if given tool not found | |
216 | return (force, force) |
|
216 | return (force, force) | |
217 |
|
217 | |||
218 | # HGMERGE takes next precedence |
|
218 | # HGMERGE takes next precedence | |
219 | hgmerge = encoding.environ.get(b"HGMERGE") |
|
219 | hgmerge = encoding.environ.get(b"HGMERGE") | |
220 | if hgmerge: |
|
220 | if hgmerge: | |
221 | if changedelete and not supportscd(hgmerge): |
|
221 | if changedelete and not supportscd(hgmerge): | |
222 | return b":prompt", None |
|
222 | return b":prompt", None | |
223 | else: |
|
223 | else: | |
224 | return (hgmerge, hgmerge) |
|
224 | return (hgmerge, hgmerge) | |
225 |
|
225 | |||
226 | # then patterns |
|
226 | # then patterns | |
227 |
|
227 | |||
228 | # whether binary capability should be checked strictly |
|
228 | # whether binary capability should be checked strictly | |
229 | binarycap = binary and strictcheck |
|
229 | binarycap = binary and strictcheck | |
230 |
|
230 | |||
231 | for pat, tool in ui.configitems(b"merge-patterns"): |
|
231 | for pat, tool in ui.configitems(b"merge-patterns"): | |
232 | mf = match.match(repo.root, b'', [pat]) |
|
232 | mf = match.match(repo.root, b'', [pat]) | |
233 | if mf(path) and check(tool, pat, symlink, binarycap, changedelete): |
|
233 | if mf(path) and check(tool, pat, symlink, binarycap, changedelete): | |
234 | if binary and not hascapability(tool, b"binary", strict=True): |
|
234 | if binary and not hascapability(tool, b"binary", strict=True): | |
235 | ui.warn( |
|
235 | ui.warn( | |
236 | _( |
|
236 | _( | |
237 | b"warning: check merge-patterns configurations," |
|
237 | b"warning: check merge-patterns configurations," | |
238 | b" if %r for binary file %r is unintentional\n" |
|
238 | b" if %r for binary file %r is unintentional\n" | |
239 | b"(see 'hg help merge-tools'" |
|
239 | b"(see 'hg help merge-tools'" | |
240 | b" for binary files capability)\n" |
|
240 | b" for binary files capability)\n" | |
241 | ) |
|
241 | ) | |
242 | % (pycompat.bytestr(tool), pycompat.bytestr(path)) |
|
242 | % (pycompat.bytestr(tool), pycompat.bytestr(path)) | |
243 | ) |
|
243 | ) | |
244 | toolpath = _findtool(ui, tool) |
|
244 | toolpath = _findtool(ui, tool) | |
245 | return (tool, _quotetoolpath(toolpath)) |
|
245 | return (tool, _quotetoolpath(toolpath)) | |
246 |
|
246 | |||
247 | # then merge tools |
|
247 | # then merge tools | |
248 | tools = {} |
|
248 | tools = {} | |
249 | disabled = set() |
|
249 | disabled = set() | |
250 | for k, v in ui.configitems(b"merge-tools"): |
|
250 | for k, v in ui.configitems(b"merge-tools"): | |
251 | t = k.split(b'.')[0] |
|
251 | t = k.split(b'.')[0] | |
252 | if t not in tools: |
|
252 | if t not in tools: | |
253 | tools[t] = int(_toolstr(ui, t, b"priority")) |
|
253 | tools[t] = int(_toolstr(ui, t, b"priority")) | |
254 | if _toolbool(ui, t, b"disabled"): |
|
254 | if _toolbool(ui, t, b"disabled"): | |
255 | disabled.add(t) |
|
255 | disabled.add(t) | |
256 | names = tools.keys() |
|
256 | names = tools.keys() | |
257 | tools = sorted( |
|
257 | tools = sorted( | |
258 | [(-p, tool) for tool, p in tools.items() if tool not in disabled] |
|
258 | [(-p, tool) for tool, p in tools.items() if tool not in disabled] | |
259 | ) |
|
259 | ) | |
260 | uimerge = ui.config(b"ui", b"merge") |
|
260 | uimerge = ui.config(b"ui", b"merge") | |
261 | if uimerge: |
|
261 | if uimerge: | |
262 | # external tools defined in uimerge won't be able to handle |
|
262 | # external tools defined in uimerge won't be able to handle | |
263 | # change/delete conflicts |
|
263 | # change/delete conflicts | |
264 | if check(uimerge, path, symlink, binary, changedelete): |
|
264 | if check(uimerge, path, symlink, binary, changedelete): | |
265 | if uimerge not in names and not changedelete: |
|
265 | if uimerge not in names and not changedelete: | |
266 | return (uimerge, uimerge) |
|
266 | return (uimerge, uimerge) | |
267 | tools.insert(0, (None, uimerge)) # highest priority |
|
267 | tools.insert(0, (None, uimerge)) # highest priority | |
268 | tools.append((None, b"hgmerge")) # the old default, if found |
|
268 | tools.append((None, b"hgmerge")) # the old default, if found | |
269 | for p, t in tools: |
|
269 | for p, t in tools: | |
270 | if check(t, None, symlink, binary, changedelete): |
|
270 | if check(t, None, symlink, binary, changedelete): | |
271 | toolpath = _findtool(ui, t) |
|
271 | toolpath = _findtool(ui, t) | |
272 | return (t, _quotetoolpath(toolpath)) |
|
272 | return (t, _quotetoolpath(toolpath)) | |
273 |
|
273 | |||
274 | # internal merge or prompt as last resort |
|
274 | # internal merge or prompt as last resort | |
275 | if symlink or binary or changedelete: |
|
275 | if symlink or binary or changedelete: | |
276 | if not changedelete and len(tools): |
|
276 | if not changedelete and len(tools): | |
277 | # every candidate tool was rejected by a capability check for symlink or binary |
278 | ui.warn(_(b"no tool found to merge %s\n") % path) |
|
278 | ui.warn(_(b"no tool found to merge %s\n") % path) | |
279 | return b":prompt", None |
|
279 | return b":prompt", None | |
280 | return b":merge", None |
|
280 | return b":merge", None | |
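# Selection order implemented above, highest priority first:
#   1. ui.forcemerge (set from --tool on the command line)
#   2. the HGMERGE environment variable
#   3. [merge-patterns] entries matching the file path
#   4. enabled [merge-tools] entries by descending priority, with ui.merge
#      tried first and the legacy "hgmerge" script appended last
#   5. the internal :merge tool, or :prompt for symlinks, binaries and
#      change/delete conflicts that no other tool could take.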
281 |
|
281 | |||
282 |
|
282 | |||
283 | def _eoltype(data): |
|
283 | def _eoltype(data): | |
284 | """Guess the EOL type of a file""" |
|
284 | """Guess the EOL type of a file""" | |
285 | if b'\0' in data: # binary |
|
285 | if b'\0' in data: # binary | |
286 | return None |
|
286 | return None | |
287 | if b'\r\n' in data: # Windows |
|
287 | if b'\r\n' in data: # Windows | |
288 | return b'\r\n' |
|
288 | return b'\r\n' | |
289 | if b'\r' in data: # Old Mac |
|
289 | if b'\r' in data: # Old Mac | |
290 | return b'\r' |
|
290 | return b'\r' | |
291 | if b'\n' in data: # UNIX |
|
291 | if b'\n' in data: # UNIX | |
292 | return b'\n' |
|
292 | return b'\n' | |
293 | return None # unknown |
|
293 | return None # unknown | |
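# Behaviour of the detection above, shown doctest-style for illustration
# (each result follows directly from the checks in _eoltype):
#     _eoltype(b'a\r\nb\r\n')  ->  b'\r\n'   (Windows)
#     _eoltype(b'a\nb\n')      ->  b'\n'     (UNIX)
#     _eoltype(b'a\rb\r')      ->  b'\r'     (old Mac)
#     _eoltype(b'a\0b')        ->  None      (binary)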
294 |
|
294 | |||
295 |
|
295 | |||
296 | def _matcheol(file, backup): |
|
296 | def _matcheol(file, backup): | |
297 | """Convert EOL markers in a file to match origfile""" |
|
297 | """Convert EOL markers in a file to match origfile""" | |
298 | tostyle = _eoltype(backup.data()) # No repo.wread filters? |
|
298 | tostyle = _eoltype(backup.data()) # No repo.wread filters? | |
299 | if tostyle: |
|
299 | if tostyle: | |
300 | data = util.readfile(file) |
|
300 | data = util.readfile(file) | |
301 | style = _eoltype(data) |
|
301 | style = _eoltype(data) | |
302 | if style: |
|
302 | if style: | |
303 | newdata = data.replace(style, tostyle) |
|
303 | newdata = data.replace(style, tostyle) | |
304 | if newdata != data: |
|
304 | if newdata != data: | |
305 | util.writefile(file, newdata) |
|
305 | util.writefile(file, newdata) | |
306 |
|
306 | |||
307 |
|
307 | |||
308 | @internaltool(b'prompt', nomerge) |
|
308 | @internaltool(b'prompt', nomerge) | |
309 | def _iprompt(repo, mynode, local, other, base, toolconf): |
|
309 | def _iprompt(repo, mynode, local, other, base, toolconf): | |
310 | """Asks the user which of the local `p1()` or the other `p2()` version to |
|
310 | """Asks the user which of the local `p1()` or the other `p2()` version to | |
311 | keep as the merged version.""" |
|
311 | keep as the merged version.""" | |
312 | ui = repo.ui |
|
312 | ui = repo.ui | |
313 | fd = local.fctx.path() |
|
313 | fd = local.fctx.path() | |
314 | uipathfn = scmutil.getuipathfn(repo) |
|
314 | uipathfn = scmutil.getuipathfn(repo) | |
315 |
|
315 | |||
316 | # Avoid prompting during an in-memory merge since it doesn't support merge |
|
316 | # Avoid prompting during an in-memory merge since it doesn't support merge | |
317 | # conflicts. |
|
317 | # conflicts. | |
318 | if local.fctx.changectx().isinmemory(): |
|
318 | if local.fctx.changectx().isinmemory(): | |
319 | raise error.InMemoryMergeConflictsError( |
|
319 | raise error.InMemoryMergeConflictsError( | |
320 | b'in-memory merge does not support file conflicts' |
|
320 | b'in-memory merge does not support file conflicts' | |
321 | ) |
|
321 | ) | |
322 |
|
322 | |||
323 | prompts = partextras([local.label, other.label]) |
|
323 | prompts = partextras([local.label, other.label]) | |
324 | prompts[b'fd'] = uipathfn(fd) |
|
324 | prompts[b'fd'] = uipathfn(fd) | |
325 | try: |
|
325 | try: | |
326 | if other.fctx.isabsent(): |
|
326 | if other.fctx.isabsent(): | |
327 | index = ui.promptchoice(_localchangedotherdeletedmsg % prompts, 2) |
|
327 | index = ui.promptchoice(_localchangedotherdeletedmsg % prompts, 2) | |
328 | choice = [b'local', b'other', b'unresolved'][index] |
|
328 | choice = [b'local', b'other', b'unresolved'][index] | |
329 | elif local.fctx.isabsent(): |
|
329 | elif local.fctx.isabsent(): | |
330 | index = ui.promptchoice(_otherchangedlocaldeletedmsg % prompts, 2) |
|
330 | index = ui.promptchoice(_otherchangedlocaldeletedmsg % prompts, 2) | |
331 | choice = [b'other', b'local', b'unresolved'][index] |
|
331 | choice = [b'other', b'local', b'unresolved'][index] | |
332 | else: |
|
332 | else: | |
333 | # IMPORTANT: keep the last line of this prompt ("What do you want to |
|
333 | # IMPORTANT: keep the last line of this prompt ("What do you want to | |
334 | # do?") very short, see comment next to _localchangedotherdeletedmsg |
|
334 | # do?") very short, see comment next to _localchangedotherdeletedmsg | |
335 | # at the top of the file for details. |
|
335 | # at the top of the file for details. | |
336 | index = ui.promptchoice( |
|
336 | index = ui.promptchoice( | |
337 | _( |
|
337 | _( | |
338 | b"file '%(fd)s' needs to be resolved.\n" |
|
338 | b"file '%(fd)s' needs to be resolved.\n" | |
339 | b"You can keep (l)ocal%(l)s, take (o)ther%(o)s, or leave " |
|
339 | b"You can keep (l)ocal%(l)s, take (o)ther%(o)s, or leave " | |
340 | b"(u)nresolved.\n" |
|
340 | b"(u)nresolved.\n" | |
341 | b"What do you want to do?" |
|
341 | b"What do you want to do?" | |
342 | b"$$ &Local $$ &Other $$ &Unresolved" |
|
342 | b"$$ &Local $$ &Other $$ &Unresolved" | |
343 | ) |
|
343 | ) | |
344 | % prompts, |
|
344 | % prompts, | |
345 | 2, |
|
345 | 2, | |
346 | ) |
|
346 | ) | |
347 | choice = [b'local', b'other', b'unresolved'][index] |
|
347 | choice = [b'local', b'other', b'unresolved'][index] | |
348 |
|
348 | |||
349 | if choice == b'other': |
|
349 | if choice == b'other': | |
350 | return _iother(repo, mynode, local, other, base, toolconf) |
|
350 | return _iother(repo, mynode, local, other, base, toolconf) | |
351 | elif choice == b'local': |
|
351 | elif choice == b'local': | |
352 | return _ilocal(repo, mynode, local, other, base, toolconf) |
|
352 | return _ilocal(repo, mynode, local, other, base, toolconf) | |
353 | elif choice == b'unresolved': |
|
353 | elif choice == b'unresolved': | |
354 | return _ifail(repo, mynode, local, other, base, toolconf) |
|
354 | return _ifail(repo, mynode, local, other, base, toolconf) | |
355 | except error.ResponseExpected: |
|
355 | except error.ResponseExpected: | |
356 | ui.write(b"\n") |
|
356 | ui.write(b"\n") | |
357 | return _ifail(repo, mynode, local, other, base, toolconf) |
|
357 | return _ifail(repo, mynode, local, other, base, toolconf) | |
358 |
|
358 | |||
359 |
|
359 | |||
360 | @internaltool(b'local', nomerge) |
|
360 | @internaltool(b'local', nomerge) | |
361 | def _ilocal(repo, mynode, local, other, base, toolconf): |
|
361 | def _ilocal(repo, mynode, local, other, base, toolconf): | |
362 | """Uses the local `p1()` version of files as the merged version.""" |
|
362 | """Uses the local `p1()` version of files as the merged version.""" | |
363 | return 0, local.fctx.isabsent() |
|
363 | return 0, local.fctx.isabsent() | |
364 |
|
364 | |||
365 |
|
365 | |||
366 | @internaltool(b'other', nomerge) |
|
366 | @internaltool(b'other', nomerge) | |
367 | def _iother(repo, mynode, local, other, base, toolconf): |
|
367 | def _iother(repo, mynode, local, other, base, toolconf): | |
368 | """Uses the other `p2()` version of files as the merged version.""" |
|
368 | """Uses the other `p2()` version of files as the merged version.""" | |
369 | if other.fctx.isabsent(): |
|
369 | if other.fctx.isabsent(): | |
370 | # local changed, remote deleted -- 'deleted' picked |
|
370 | # local changed, remote deleted -- 'deleted' picked | |
371 | _underlyingfctxifabsent(local.fctx).remove() |
|
371 | _underlyingfctxifabsent(local.fctx).remove() | |
372 | deleted = True |
|
372 | deleted = True | |
373 | else: |
|
373 | else: | |
374 | _underlyingfctxifabsent(local.fctx).write( |
|
374 | _underlyingfctxifabsent(local.fctx).write( | |
375 | other.fctx.data(), other.fctx.flags() |
|
375 | other.fctx.data(), other.fctx.flags() | |
376 | ) |
|
376 | ) | |
377 | deleted = False |
|
377 | deleted = False | |
378 | return 0, deleted |
|
378 | return 0, deleted | |
379 |
|
379 | |||
380 |
|
380 | |||
381 | @internaltool(b'fail', nomerge) |
|
381 | @internaltool(b'fail', nomerge) | |
382 | def _ifail(repo, mynode, local, other, base, toolconf): |
|
382 | def _ifail(repo, mynode, local, other, base, toolconf): | |
383 | """ |
|
383 | """ | |
384 | Rather than attempting to merge files that were modified on both |
|
384 | Rather than attempting to merge files that were modified on both | |
385 | branches, it marks them as unresolved. The resolve command must be |
|
385 | branches, it marks them as unresolved. The resolve command must be | |
386 | used to resolve these conflicts.""" |
|
386 | used to resolve these conflicts.""" | |
387 | # for change/delete conflicts write out the changed version, then fail |
|
387 | # for change/delete conflicts write out the changed version, then fail | |
388 | if local.fctx.isabsent(): |
|
388 | if local.fctx.isabsent(): | |
389 | _underlyingfctxifabsent(local.fctx).write( |
|
389 | _underlyingfctxifabsent(local.fctx).write( | |
390 | other.fctx.data(), other.fctx.flags() |
|
390 | other.fctx.data(), other.fctx.flags() | |
391 | ) |
|
391 | ) | |
392 | return 1, False |
|
392 | return 1, False | |
393 |
|
393 | |||
394 |
|
394 | |||
395 | def _underlyingfctxifabsent(filectx): |
|
395 | def _underlyingfctxifabsent(filectx): | |
396 | """Sometimes when resolving, our fcd is actually an absentfilectx, but |
|
396 | """Sometimes when resolving, our fcd is actually an absentfilectx, but | |
397 | we want to write to it (to do the resolve). This helper returns the |
|
397 | we want to write to it (to do the resolve). This helper returns the | |
398 | underlying workingfilectx in that case. |
399 | """ |
|
399 | """ | |
400 | if filectx.isabsent(): |
|
400 | if filectx.isabsent(): | |
401 | return filectx.changectx()[filectx.path()] |
|
401 | return filectx.changectx()[filectx.path()] | |
402 | else: |
|
402 | else: | |
403 | return filectx |
|
403 | return filectx | |
404 |
|
404 | |||
405 |
|
405 | |||
406 | def _verifytext(input, ui): |
|
406 | def _verifytext(input, ui): | |
407 | """verifies that text is non-binary""" |
|
407 | """verifies that text is non-binary""" | |
408 | if stringutil.binary(input.text()): |
|
408 | if stringutil.binary(input.text()): | |
409 | msg = _(b"%s looks like a binary file.") % input.fctx.path() |
|
409 | msg = _(b"%s looks like a binary file.") % input.fctx.path() | |
410 | ui.warn(_(b'warning: %s\n') % msg) |
|
410 | ui.warn(_(b'warning: %s\n') % msg) | |
411 | raise error.Abort(msg) |
|
411 | raise error.Abort(msg) | |
412 |
|
412 | |||
413 |
|
413 | |||
414 | def _premerge(repo, local, other, base, toolconf): |
|
414 | def _premerge(repo, local, other, base, toolconf): | |
415 | tool, toolpath, binary, symlink, scriptfn = toolconf |
|
415 | tool, toolpath, binary, symlink, scriptfn = toolconf | |
416 | if symlink or local.fctx.isabsent() or other.fctx.isabsent(): |
|
416 | if symlink or local.fctx.isabsent() or other.fctx.isabsent(): | |
417 | return 1 |
|
417 | return 1 | |
418 |
|
418 | |||
419 | ui = repo.ui |
|
419 | ui = repo.ui | |
420 |
|
420 | |||
421 | validkeep = [b'keep', b'keep-merge3', b'keep-mergediff'] |
|
421 | validkeep = [b'keep', b'keep-merge3', b'keep-mergediff'] | |
422 |
|
422 | |||
423 | # do we attempt to simplemerge first? |
|
423 | # do we attempt to simplemerge first? | |
424 | try: |
|
424 | try: | |
425 | premerge = _toolbool(ui, tool, b"premerge", not binary) |
|
425 | premerge = _toolbool(ui, tool, b"premerge", not binary) | |
426 | except error.ConfigError: |
|
426 | except error.ConfigError: | |
427 | premerge = _toolstr(ui, tool, b"premerge", b"").lower() |
|
427 | premerge = _toolstr(ui, tool, b"premerge", b"").lower() | |
428 | if premerge not in validkeep: |
|
428 | if premerge not in validkeep: | |
429 | _valid = b', '.join([b"'" + v + b"'" for v in validkeep]) |
|
429 | _valid = b', '.join([b"'" + v + b"'" for v in validkeep]) | |
430 | raise error.ConfigError( |
|
430 | raise error.ConfigError( | |
431 | _(b"%s.premerge not valid ('%s' is neither boolean nor %s)") |
|
431 | _(b"%s.premerge not valid ('%s' is neither boolean nor %s)") | |
432 | % (tool, premerge, _valid) |
|
432 | % (tool, premerge, _valid) | |
433 | ) |
|
433 | ) | |
434 |
|
434 | |||
435 | if premerge: |
|
435 | if premerge: | |
436 | mode = b'merge' |
|
436 | mode = b'merge' | |
437 | if premerge == b'keep-mergediff': |
|
437 | if premerge == b'keep-mergediff': | |
438 | mode = b'mergediff' |
|
438 | mode = b'mergediff' | |
439 | elif premerge == b'keep-merge3': |
|
439 | elif premerge == b'keep-merge3': | |
440 | mode = b'merge3' |
|
440 | mode = b'merge3' | |
441 | if any( |
|
441 | if any( | |
442 | stringutil.binary(input.text()) for input in (local, base, other) |
|
442 | stringutil.binary(input.text()) for input in (local, base, other) | |
443 | ): |
|
443 | ): | |
444 | return 1 # continue merging |
|
444 | return 1 # continue merging | |
445 | merged_text, conflicts = simplemerge.simplemerge( |
|
445 | merged_text, conflicts = simplemerge.simplemerge( | |
446 | local, base, other, mode=mode |
|
446 | local, base, other, mode=mode | |
447 | ) |
|
447 | ) | |
448 | if not conflicts or premerge in validkeep: |
|
448 | if not conflicts or premerge in validkeep: | |
449 | # fcd.flags() already has the merged flags (done in |
|
449 | # fcd.flags() already has the merged flags (done in | |
450 | # mergestate.resolve()) |
|
450 | # mergestate.resolve()) | |
451 | local.fctx.write(merged_text, local.fctx.flags()) |
|
451 | local.fctx.write(merged_text, local.fctx.flags()) | |
452 | if not conflicts: |
|
452 | if not conflicts: | |
453 | ui.debug(b" premerge successful\n") |
|
453 | ui.debug(b" premerge successful\n") | |
454 | return 0 |
|
454 | return 0 | |
455 | return 1 # continue merging |
|
455 | return 1 # continue merging | |
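# The premerge setting read above is either a boolean or one of the "keep"
# modes listed in validkeep. An assumed example configuration:
#     [merge-tools]
#     mytool.premerge = keep-mergediff
# With a "keep" mode the simplemerge result, conflict markers included, is
# written to the file even when conflicts remain, before the real tool runs.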
456 |
|
456 | |||
457 |
|
457 | |||
458 | def _mergecheck(repo, mynode, fcd, fco, fca, toolconf): |
|
458 | def _mergecheck(repo, mynode, fcd, fco, fca, toolconf): | |
459 | tool, toolpath, binary, symlink, scriptfn = toolconf |
|
459 | tool, toolpath, binary, symlink, scriptfn = toolconf | |
460 | uipathfn = scmutil.getuipathfn(repo) |
|
460 | uipathfn = scmutil.getuipathfn(repo) | |
461 | if symlink: |
|
461 | if symlink: | |
462 | repo.ui.warn( |
|
462 | repo.ui.warn( | |
463 | _(b'warning: internal %s cannot merge symlinks for %s\n') |
|
463 | _(b'warning: internal %s cannot merge symlinks for %s\n') | |
464 | % (tool, uipathfn(fcd.path())) |
|
464 | % (tool, uipathfn(fcd.path())) | |
465 | ) |
|
465 | ) | |
466 | return False |
|
466 | return False | |
467 | if fcd.isabsent() or fco.isabsent(): |
|
467 | if fcd.isabsent() or fco.isabsent(): | |
468 | repo.ui.warn( |
|
468 | repo.ui.warn( | |
469 | _( |
|
469 | _( | |
470 | b'warning: internal %s cannot merge change/delete ' |
|
470 | b'warning: internal %s cannot merge change/delete ' | |
471 | b'conflict for %s\n' |
|
471 | b'conflict for %s\n' | |
472 | ) |
|
472 | ) | |
473 | % (tool, uipathfn(fcd.path())) |
|
473 | % (tool, uipathfn(fcd.path())) | |
474 | ) |
|
474 | ) | |
475 | return False |
|
475 | return False | |
476 | return True |
|
476 | return True | |
477 |
|
477 | |||
478 |
|
478 | |||
479 | def _merge(repo, local, other, base, mode): |
|
479 | def _merge(repo, local, other, base, mode): | |
480 | """ |
|
480 | """ | |
481 | Uses the internal non-interactive simple merge algorithm for merging |
|
481 | Uses the internal non-interactive simple merge algorithm for merging | |
482 | files. It will fail if there are any conflicts and leave markers in |
|
482 | files. It will fail if there are any conflicts and leave markers in | |
483 | the partially merged file. Markers will have two sections, one for each side |
484 | of the merge, unless mode equals 'union', which suppresses the markers.""" |
485 | ui = repo.ui |
|
485 | ui = repo.ui | |
486 |
|
486 | |||
487 | try: |
|
487 | try: | |
488 | _verifytext(local, ui) |
|
488 | _verifytext(local, ui) | |
489 | _verifytext(base, ui) |
|
489 | _verifytext(base, ui) | |
490 | _verifytext(other, ui) |
|
490 | _verifytext(other, ui) | |
491 | except error.Abort: |
|
491 | except error.Abort: | |
492 | return True, True, False |
|
492 | return True, True, False | |
493 | else: |
|
493 | else: | |
494 | merged_text, conflicts = simplemerge.simplemerge( |
|
494 | merged_text, conflicts = simplemerge.simplemerge( | |
495 | local, base, other, mode=mode |
|
495 | local, base, other, mode=mode | |
496 | ) |
|
496 | ) | |
497 | # fcd.flags() already has the merged flags (done in |
|
497 | # fcd.flags() already has the merged flags (done in | |
498 | # mergestate.resolve()) |
|
498 | # mergestate.resolve()) | |
499 | local.fctx.write(merged_text, local.fctx.flags()) |
|
499 | local.fctx.write(merged_text, local.fctx.flags()) | |
500 | return True, conflicts, False |
|
500 | return True, conflicts, False | |
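# The mode argument selects the simplemerge flavour used by the internal
# tools registered below: 'union', 'merge', 'merge3' and 'mergediff' differ
# in how conflict regions are rendered, while 'local' and 'other' (used by
# :merge-local and :merge-other) resolve every conflict toward one side.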
501 |
|
501 | |||
502 |
|
502 | |||
503 | @internaltool( |
|
503 | @internaltool( | |
504 | b'union', |
|
504 | b'union', | |
505 | fullmerge, |
|
505 | fullmerge, | |
506 | _( |
|
506 | _( | |
507 | b"warning: conflicts while merging %s! " |
|
507 | b"warning: conflicts while merging %s! " | |
508 | b"(edit, then use 'hg resolve --mark')\n" |
|
508 | b"(edit, then use 'hg resolve --mark')\n" | |
509 | ), |
|
509 | ), | |
510 | precheck=_mergecheck, |
|
510 | precheck=_mergecheck, | |
511 | ) |
|
511 | ) | |
512 | def _iunion(repo, mynode, local, other, base, toolconf, backup): |
|
512 | def _iunion(repo, mynode, local, other, base, toolconf, backup): | |
513 | """ |
|
513 | """ | |
514 | Uses the internal non-interactive simple merge algorithm for merging |
|
514 | Uses the internal non-interactive simple merge algorithm for merging | |
515 | files. It will use both left and right sides for conflict regions. |
|
515 | files. It will use both left and right sides for conflict regions. | |
516 | No markers are inserted.""" |
|
516 | No markers are inserted.""" | |
517 | return _merge(repo, local, other, base, b'union') |
|
517 | return _merge(repo, local, other, base, b'union') | |
518 |
|
518 | |||
519 |
|
519 | |||
520 | @internaltool( |
|
520 | @internaltool( | |
521 | b'merge', |
|
521 | b'merge', | |
522 | fullmerge, |
|
522 | fullmerge, | |
523 | _( |
|
523 | _( | |
524 | b"warning: conflicts while merging %s! " |
|
524 | b"warning: conflicts while merging %s! " | |
525 | b"(edit, then use 'hg resolve --mark')\n" |
|
525 | b"(edit, then use 'hg resolve --mark')\n" | |
526 | ), |
|
526 | ), | |
527 | precheck=_mergecheck, |
|
527 | precheck=_mergecheck, | |
528 | ) |
|
528 | ) | |
529 | def _imerge(repo, mynode, local, other, base, toolconf, backup): |
|
529 | def _imerge(repo, mynode, local, other, base, toolconf, backup): | |
530 | """ |
|
530 | """ | |
531 | Uses the internal non-interactive simple merge algorithm for merging |
|
531 | Uses the internal non-interactive simple merge algorithm for merging | |
532 | files. It will fail if there are any conflicts and leave markers in |
|
532 | files. It will fail if there are any conflicts and leave markers in | |
533 | the partially merged file. Markers will have two sections, one for each side |
|
533 | the partially merged file. Markers will have two sections, one for each side | |
534 | of merge.""" |
|
534 | of merge.""" | |
535 | return _merge(repo, local, other, base, b'merge') |
|
535 | return _merge(repo, local, other, base, b'merge') | |
536 |
|
536 | |||
537 |
|
537 | |||
538 | @internaltool( |
|
538 | @internaltool( | |
539 | b'merge3', |
|
539 | b'merge3', | |
540 | fullmerge, |
|
540 | fullmerge, | |
541 | _( |
|
541 | _( | |
542 | b"warning: conflicts while merging %s! " |
|
542 | b"warning: conflicts while merging %s! " | |
543 | b"(edit, then use 'hg resolve --mark')\n" |
|
543 | b"(edit, then use 'hg resolve --mark')\n" | |
544 | ), |
|
544 | ), | |
545 | precheck=_mergecheck, |
|
545 | precheck=_mergecheck, | |
546 | ) |
|
546 | ) | |
547 | def _imerge3(repo, mynode, local, other, base, toolconf, backup): |
|
547 | def _imerge3(repo, mynode, local, other, base, toolconf, backup): | |
548 | """ |
|
548 | """ | |
549 | Uses the internal non-interactive simple merge algorithm for merging |
|
549 | Uses the internal non-interactive simple merge algorithm for merging | |
550 | files. It will fail if there are any conflicts and leave markers in |
|
550 | files. It will fail if there are any conflicts and leave markers in | |
551 | the partially merged file. Markers will have three sections, one from each |
552 | side of the merge and one for the base content.""" |
|
552 | side of the merge and one for the base content.""" | |
553 | return _merge(repo, local, other, base, b'merge3') |
|
553 | return _merge(repo, local, other, base, b'merge3') | |
554 |
|
554 | |||
555 |
|
555 | |||
556 | @internaltool( |
|
556 | @internaltool( | |
557 | b'merge3-lie-about-conflicts', |
|
557 | b'merge3-lie-about-conflicts', | |
558 | fullmerge, |
|
558 | fullmerge, | |
559 | b'', |
|
559 | b'', | |
560 | precheck=_mergecheck, |
|
560 | precheck=_mergecheck, | |
561 | ) |
|
561 | ) | |
562 | def _imerge3alwaysgood(*args, **kwargs): |
|
562 | def _imerge3alwaysgood(*args, **kwargs): | |
563 | # Like merge3, but record conflicts as resolved with markers in place. |
|
563 | # Like merge3, but record conflicts as resolved with markers in place. | |
564 | # |
|
564 | # | |
565 | # This is used for `diff.merge` to show the differences between |
|
565 | # This is used for `diff.merge` to show the differences between | |
566 | # the auto-merge state and the committed merge state. It may be |
|
566 | # the auto-merge state and the committed merge state. It may be | |
567 | # useful for other things. |
|
567 | # useful for other things. | |
568 | b1, junk, b2 = _imerge3(*args, **kwargs) |
|
568 | b1, junk, b2 = _imerge3(*args, **kwargs) | |
569 | # TODO is this right? I'm not sure what these return values mean, |
|
569 | # TODO is this right? I'm not sure what these return values mean, | |
570 | # but as far as I can tell this will indicate to callers that the |
571 | # merge succeeded. |
|
571 | # merge succeeded. | |
572 | return b1, False, b2 |
|
572 | return b1, False, b2 | |
573 |
|
573 | |||
574 |
|
574 | |||
575 | @internaltool( |
|
575 | @internaltool( | |
576 | b'mergediff', |
|
576 | b'mergediff', | |
577 | fullmerge, |
|
577 | fullmerge, | |
578 | _( |
|
578 | _( | |
579 | b"warning: conflicts while merging %s! " |
|
579 | b"warning: conflicts while merging %s! " | |
580 | b"(edit, then use 'hg resolve --mark')\n" |
|
580 | b"(edit, then use 'hg resolve --mark')\n" | |
581 | ), |
|
581 | ), | |
582 | precheck=_mergecheck, |
|
582 | precheck=_mergecheck, | |
583 | ) |
|
583 | ) | |
584 | def _imerge_diff(repo, mynode, local, other, base, toolconf, backup): |
|
584 | def _imerge_diff(repo, mynode, local, other, base, toolconf, backup): | |
585 | """ |
|
585 | """ | |
586 | Uses the internal non-interactive simple merge algorithm for merging |
|
586 | Uses the internal non-interactive simple merge algorithm for merging | |
587 | files. It will fail if there are any conflicts and leave markers in |
|
587 | files. It will fail if there are any conflicts and leave markers in | |
588 | the partially merged file. The marker will have two sections, one with the |
|
588 | the partially merged file. The marker will have two sections, one with the | |
589 | content from one side of the merge, and one with a diff from the base |
|
589 | content from one side of the merge, and one with a diff from the base | |
590 | content to the content on the other side. (experimental)""" |
|
590 | content to the content on the other side. (experimental)""" | |
591 | return _merge(repo, local, other, base, b'mergediff') |
|
591 | return _merge(repo, local, other, base, b'mergediff') | |
592 |
|
592 | |||
593 |
|
593 | |||
594 | @internaltool(b'merge-local', mergeonly, precheck=_mergecheck) |
|
594 | @internaltool(b'merge-local', mergeonly, precheck=_mergecheck) | |
595 | def _imergelocal(repo, mynode, local, other, base, toolconf, backup): |
|
595 | def _imergelocal(repo, mynode, local, other, base, toolconf, backup): | |
596 | """ |
|
596 | """ | |
597 | Like :merge, but resolve all conflicts non-interactively in favor |
|
597 | Like :merge, but resolve all conflicts non-interactively in favor | |
598 | of the local `p1()` changes.""" |
|
598 | of the local `p1()` changes.""" | |
599 | return _merge(repo, local, other, base, b'local') |
|
599 | return _merge(repo, local, other, base, b'local') | |
600 |
|
600 | |||
601 |
|
601 | |||
602 | @internaltool(b'merge-other', mergeonly, precheck=_mergecheck) |
|
602 | @internaltool(b'merge-other', mergeonly, precheck=_mergecheck) | |
603 | def _imergeother(repo, mynode, local, other, base, toolconf, backup): |
|
603 | def _imergeother(repo, mynode, local, other, base, toolconf, backup): | |
604 | """ |
|
604 | """ | |
605 | Like :merge, but resolve all conflicts non-interactively in favor |
|
605 | Like :merge, but resolve all conflicts non-interactively in favor | |
606 | of the other `p2()` changes.""" |
|
606 | of the other `p2()` changes.""" | |
607 | return _merge(repo, local, other, base, b'other') |
|
607 | return _merge(repo, local, other, base, b'other') | |
608 |
|
608 | |||
609 |
|
609 | |||
610 | @internaltool( |
|
610 | @internaltool( | |
611 | b'tagmerge', |
|
611 | b'tagmerge', | |
612 | mergeonly, |
|
612 | mergeonly, | |
613 | _( |
|
613 | _( | |
614 | b"automatic tag merging of %s failed! " |
|
614 | b"automatic tag merging of %s failed! " | |
615 | b"(use 'hg resolve --tool :merge' or another merge " |
|
615 | b"(use 'hg resolve --tool :merge' or another merge " | |
616 | b"tool of your choice)\n" |
|
616 | b"tool of your choice)\n" | |
617 | ), |
|
617 | ), | |
618 | ) |
|
618 | ) | |
619 | def _itagmerge(repo, mynode, local, other, base, toolconf, backup): |
|
619 | def _itagmerge(repo, mynode, local, other, base, toolconf, backup): | |
620 | """ |
|
620 | """ | |
621 | Uses the internal tag merge algorithm (experimental). |
|
621 | Uses the internal tag merge algorithm (experimental). | |
622 | """ |
|
622 | """ | |
623 | success, status = tagmerge.merge(repo, local.fctx, other.fctx, base.fctx) |
|
623 | success, status = tagmerge.merge(repo, local.fctx, other.fctx, base.fctx) | |
624 | return success, status, False |
|
624 | return success, status, False | |
625 |
|
625 | |||
626 |
|
626 | |||
627 | @internaltool(b'dump', fullmerge, binary=True, symlink=True) |
|
627 | @internaltool(b'dump', fullmerge, binary=True, symlink=True) | |
628 | def _idump(repo, mynode, local, other, base, toolconf, backup): |
|
628 | def _idump(repo, mynode, local, other, base, toolconf, backup): | |
629 | """ |
|
629 | """ | |
630 | Creates three versions of the files to merge, containing the |
|
630 | Creates three versions of the files to merge, containing the | |
631 | contents of local, other and base. These files can then be used to |
|
631 | contents of local, other and base. These files can then be used to | |
632 | perform a merge manually. If the file to be merged is named |
|
632 | perform a merge manually. If the file to be merged is named | |
633 | ``a.txt``, these files will accordingly be named ``a.txt.local``, |
|
633 | ``a.txt``, these files will accordingly be named ``a.txt.local``, | |
634 | ``a.txt.other`` and ``a.txt.base`` and they will be placed in the |
|
634 | ``a.txt.other`` and ``a.txt.base`` and they will be placed in the | |
635 | same directory as ``a.txt``. |
|
635 | same directory as ``a.txt``. | |
636 |
|
636 | |||
637 | This implies premerge. Therefore, files aren't dumped if premerge |
638 | runs successfully. Use :forcedump to forcibly write files out. |
639 | """ |
|
639 | """ | |
640 | a = _workingpath(repo, local.fctx) |
|
640 | a = _workingpath(repo, local.fctx) | |
641 | fd = local.fctx.path() |
|
641 | fd = local.fctx.path() | |
642 |
|
642 | |||
643 | from . import context |
|
643 | from . import context | |
644 |
|
644 | |||
645 | if isinstance(local.fctx, context.overlayworkingfilectx): |
|
645 | if isinstance(local.fctx, context.overlayworkingfilectx): | |
646 | raise error.InMemoryMergeConflictsError( |
|
646 | raise error.InMemoryMergeConflictsError( | |
647 | b'in-memory merge does not support the :dump tool.' |
|
647 | b'in-memory merge does not support the :dump tool.' | |
648 | ) |
|
648 | ) | |
649 |
|
649 | |||
650 | util.writefile(a + b".local", local.fctx.decodeddata()) |
|
650 | util.writefile(a + b".local", local.fctx.decodeddata()) | |
651 | repo.wwrite(fd + b".other", other.fctx.data(), other.fctx.flags()) |
|
651 | repo.wwrite(fd + b".other", other.fctx.data(), other.fctx.flags()) | |
652 | repo.wwrite(fd + b".base", base.fctx.data(), base.fctx.flags()) |
|
652 | repo.wwrite(fd + b".base", base.fctx.data(), base.fctx.flags()) | |
653 | return False, 1, False |
|
653 | return False, 1, False | |
654 |
|
654 | |||
655 |
|
655 | |||
656 | @internaltool(b'forcedump', mergeonly, binary=True, symlink=True) |
|
656 | @internaltool(b'forcedump', mergeonly, binary=True, symlink=True) | |
657 | def _forcedump(repo, mynode, local, other, base, toolconf, backup): |
|
657 | def _forcedump(repo, mynode, local, other, base, toolconf, backup): | |
658 | """ |
|
658 | """ | |
659 | Creates three versions of the files, the same as :dump, but omits premerge. |
660 | """ |
|
660 | """ | |
661 | return _idump(repo, mynode, local, other, base, toolconf, backup) |
|
661 | return _idump(repo, mynode, local, other, base, toolconf, backup) | |
662 |
|
662 | |||
663 |
|
663 | |||
664 | def _xmergeimm(repo, mynode, local, other, base, toolconf, backup): |
|
664 | def _xmergeimm(repo, mynode, local, other, base, toolconf, backup): | |
665 | # In-memory merge simply raises an exception on all external merge tools, |
|
665 | # In-memory merge simply raises an exception on all external merge tools, | |
666 | # for now. |
|
666 | # for now. | |
667 | # |
|
667 | # | |
668 | # It would be possible to run most tools with temporary files, but this |
|
668 | # It would be possible to run most tools with temporary files, but this | |
669 | # raises the question of what to do if the user only partially resolves the |
|
669 | # raises the question of what to do if the user only partially resolves the | |
670 | # file -- we can't leave a merge state. (Copy to somewhere in the .hg/ |
|
670 | # file -- we can't leave a merge state. (Copy to somewhere in the .hg/ | |
671 | # directory and tell the user how to get it is my best idea, but it's |
|
671 | # directory and tell the user how to get it is my best idea, but it's | |
672 | # clunky.) |
|
672 | # clunky.) | |
673 | raise error.InMemoryMergeConflictsError( |
|
673 | raise error.InMemoryMergeConflictsError( | |
674 | b'in-memory merge does not support external merge tools' |
|
674 | b'in-memory merge does not support external merge tools' | |
675 | ) |
|
675 | ) | |
676 |
|
676 | |||
677 |
|
677 | |||
678 | def _describemerge(ui, repo, mynode, fcl, fcb, fco, env, toolpath, args): |
|
678 | def _describemerge(ui, repo, mynode, fcl, fcb, fco, env, toolpath, args): | |
679 | tmpl = ui.config(b'command-templates', b'pre-merge-tool-output') |
|
679 | tmpl = ui.config(b'command-templates', b'pre-merge-tool-output') | |
680 | if not tmpl: |
|
680 | if not tmpl: | |
681 | return |
|
681 | return | |
682 |
|
682 | |||
683 | mappingdict = templateutil.mappingdict |
|
683 | mappingdict = templateutil.mappingdict | |
684 | props = { |
|
684 | props = { | |
685 | b'ctx': fcl.changectx(), |
|
685 | b'ctx': fcl.changectx(), | |
686 | b'node': hex(mynode), |
|
686 | b'node': hex(mynode), | |
687 | b'path': fcl.path(), |
|
687 | b'path': fcl.path(), | |
688 | b'local': mappingdict( |
|
688 | b'local': mappingdict( | |
689 | { |
|
689 | { | |
690 | b'ctx': fcl.changectx(), |
|
690 | b'ctx': fcl.changectx(), | |
691 | b'fctx': fcl, |
|
691 | b'fctx': fcl, | |
692 | b'node': hex(mynode), |
|
692 | b'node': hex(mynode), | |
693 | b'name': _(b'local'), |
|
693 | b'name': _(b'local'), | |
694 | b'islink': b'l' in fcl.flags(), |
|
694 | b'islink': b'l' in fcl.flags(), | |
695 | b'label': env[b'HG_MY_LABEL'], |
|
695 | b'label': env[b'HG_MY_LABEL'], | |
696 | } |
|
696 | } | |
697 | ), |
|
697 | ), | |
698 | b'base': mappingdict( |
|
698 | b'base': mappingdict( | |
699 | { |
|
699 | { | |
700 | b'ctx': fcb.changectx(), |
|
700 | b'ctx': fcb.changectx(), | |
701 | b'fctx': fcb, |
|
701 | b'fctx': fcb, | |
702 | b'name': _(b'base'), |
|
702 | b'name': _(b'base'), | |
703 | b'islink': b'l' in fcb.flags(), |
|
703 | b'islink': b'l' in fcb.flags(), | |
704 | b'label': env[b'HG_BASE_LABEL'], |
|
704 | b'label': env[b'HG_BASE_LABEL'], | |
705 | } |
|
705 | } | |
706 | ), |
|
706 | ), | |
707 | b'other': mappingdict( |
|
707 | b'other': mappingdict( | |
708 | { |
|
708 | { | |
709 | b'ctx': fco.changectx(), |
|
709 | b'ctx': fco.changectx(), | |
710 | b'fctx': fco, |
|
710 | b'fctx': fco, | |
711 | b'name': _(b'other'), |
|
711 | b'name': _(b'other'), | |
712 | b'islink': b'l' in fco.flags(), |
|
712 | b'islink': b'l' in fco.flags(), | |
713 | b'label': env[b'HG_OTHER_LABEL'], |
|
713 | b'label': env[b'HG_OTHER_LABEL'], | |
714 | } |
|
714 | } | |
715 | ), |
|
715 | ), | |
716 | b'toolpath': toolpath, |
|
716 | b'toolpath': toolpath, | |
717 | b'toolargs': args, |
|
717 | b'toolargs': args, | |
718 | } |
|
718 | } | |
719 |
|
719 | |||
720 | # TODO: make all of this something that can be specified on a per-tool basis |
|
720 | # TODO: make all of this something that can be specified on a per-tool basis | |
721 | tmpl = templater.unquotestring(tmpl) |
|
721 | tmpl = templater.unquotestring(tmpl) | |
722 |
|
722 | |||
723 | # Not using cmdutil.rendertemplate here because importing cmdutil from |
724 | # this module causes import errors. |
725 | tres = formatter.templateresources(ui, repo) |
|
725 | tres = formatter.templateresources(ui, repo) | |
726 | t = formatter.maketemplater( |
|
726 | t = formatter.maketemplater( | |
727 | ui, tmpl, defaults=templatekw.keywords, resources=tres |
|
727 | ui, tmpl, defaults=templatekw.keywords, resources=tres | |
728 | ) |
|
728 | ) | |
729 | ui.status(t.renderdefault(props)) |
|
729 | ui.status(t.renderdefault(props)) | |
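# An assumed example of the config consumed here (standard template syntax,
# using the properties assembled above):
#     [command-templates]
#     pre-merge-tool-output = running {toolpath} on {path}\n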
730 |
|
730 | |||
731 |
|
731 | |||
732 | def _xmerge(repo, mynode, local, other, base, toolconf, backup): |
|
732 | def _xmerge(repo, mynode, local, other, base, toolconf, backup): | |
733 | fcd = local.fctx |
|
733 | fcd = local.fctx | |
734 | fco = other.fctx |
|
734 | fco = other.fctx | |
735 | fca = base.fctx |
|
735 | fca = base.fctx | |
736 | tool, toolpath, binary, symlink, scriptfn = toolconf |
|
736 | tool, toolpath, binary, symlink, scriptfn = toolconf | |
737 | uipathfn = scmutil.getuipathfn(repo) |
|
737 | uipathfn = scmutil.getuipathfn(repo) | |
738 | if fcd.isabsent() or fco.isabsent(): |
|
738 | if fcd.isabsent() or fco.isabsent(): | |
739 | repo.ui.warn( |
|
739 | repo.ui.warn( | |
740 | _(b'warning: %s cannot merge change/delete conflict for %s\n') |
|
740 | _(b'warning: %s cannot merge change/delete conflict for %s\n') | |
741 | % (tool, uipathfn(fcd.path())) |
|
741 | % (tool, uipathfn(fcd.path())) | |
742 | ) |
|
742 | ) | |
743 | return False, 1, None |
|
743 | return False, 1, None | |
744 | localpath = _workingpath(repo, fcd) |
|
744 | localpath = _workingpath(repo, fcd) | |
745 | args = _toolstr(repo.ui, tool, b"args") |
|
745 | args = _toolstr(repo.ui, tool, b"args") | |
746 |
|
746 | |||
747 | with _maketempfiles( |
|
747 | with _maketempfiles( | |
748 | repo, fco, fca, repo.wvfs.join(backup.path()), b"$output" in args |
|
748 | repo, fco, fca, repo.wvfs.join(backup.path()), b"$output" in args | |
749 | ) as temppaths: |
|
749 | ) as temppaths: | |
750 | basepath, otherpath, localoutputpath = temppaths |
|
750 | basepath, otherpath, localoutputpath = temppaths | |
751 | outpath = b"" |
|
751 | outpath = b"" | |
752 |
|
752 | |||
753 | def format_label(input): |
|
753 | def format_label(input): | |
754 | if input.label_detail: |
|
754 | if input.label_detail: | |
755 | return b'%s: %s' % (input.label, input.label_detail) |
|
755 | return b'%s: %s' % (input.label, input.label_detail) | |
756 | else: |
|
756 | else: | |
757 | return input.label |
|
757 | return input.label | |
758 |
|
758 | |||
759 | env = { |
|
759 | env = { | |
760 | b'HG_FILE': fcd.path(), |
|
760 | b'HG_FILE': fcd.path(), | |
761 | b'HG_MY_NODE': short(mynode), |
|
761 | b'HG_MY_NODE': short(mynode), | |
762 | b'HG_OTHER_NODE': short(fco.changectx().node()), |
|
762 | b'HG_OTHER_NODE': short(fco.changectx().node()), | |
763 | b'HG_BASE_NODE': short(fca.changectx().node()), |
|
763 | b'HG_BASE_NODE': short(fca.changectx().node()), | |
764 | b'HG_MY_ISLINK': b'l' in fcd.flags(), |
|
764 | b'HG_MY_ISLINK': b'l' in fcd.flags(), | |
765 | b'HG_OTHER_ISLINK': b'l' in fco.flags(), |
|
765 | b'HG_OTHER_ISLINK': b'l' in fco.flags(), | |
766 | b'HG_BASE_ISLINK': b'l' in fca.flags(), |
|
766 | b'HG_BASE_ISLINK': b'l' in fca.flags(), | |
767 | b'HG_MY_LABEL': format_label(local), |
|
767 | b'HG_MY_LABEL': format_label(local), | |
768 | b'HG_OTHER_LABEL': format_label(other), |
|
768 | b'HG_OTHER_LABEL': format_label(other), | |
769 | b'HG_BASE_LABEL': format_label(base), |
|
769 | b'HG_BASE_LABEL': format_label(base), | |
770 | } |
|
770 | } | |
771 | ui = repo.ui |
|
771 | ui = repo.ui | |
772 |
|
772 | |||
773 | if b"$output" in args: |
|
773 | if b"$output" in args: | |
774 | # read input from backup, write to original |
|
774 | # read input from backup, write to original | |
775 | outpath = localpath |
|
775 | outpath = localpath | |
776 | localpath = localoutputpath |
|
776 | localpath = localoutputpath | |
777 | replace = { |
|
777 | replace = { | |
778 | b'local': localpath, |
|
778 | b'local': localpath, | |
779 | b'base': basepath, |
|
779 | b'base': basepath, | |
780 | b'other': otherpath, |
|
780 | b'other': otherpath, | |
781 | b'output': outpath, |
|
781 | b'output': outpath, | |
782 | b'labellocal': format_label(local), |
|
782 | b'labellocal': format_label(local), | |
783 | b'labelother': format_label(other), |
|
783 | b'labelother': format_label(other), | |
784 | b'labelbase': format_label(base), |
|
784 | b'labelbase': format_label(base), | |
785 | } |
|
785 | } | |
786 | args = util.interpolate( |
|
786 | args = util.interpolate( | |
787 | br'\$', |
|
787 | br'\$', | |
788 | replace, |
|
788 | replace, | |
789 | args, |
|
789 | args, | |
790 | lambda s: procutil.shellquote(util.localpath(s)), |
|
790 | lambda s: procutil.shellquote(util.localpath(s)), | |
791 | ) |
|
791 | ) | |
792 | if _toolbool(ui, tool, b"gui"): |
|
792 | if _toolbool(ui, tool, b"gui"): | |
793 | repo.ui.status( |
|
793 | repo.ui.status( | |
794 | _(b'running merge tool %s for file %s\n') |
|
794 | _(b'running merge tool %s for file %s\n') | |
795 | % (tool, uipathfn(fcd.path())) |
|
795 | % (tool, uipathfn(fcd.path())) | |
796 | ) |
|
796 | ) | |
797 | if scriptfn is None: |
|
797 | if scriptfn is None: | |
798 | cmd = toolpath + b' ' + args |
|
798 | cmd = toolpath + b' ' + args | |
799 | repo.ui.debug(b'launching merge tool: %s\n' % cmd) |
|
799 | repo.ui.debug(b'launching merge tool: %s\n' % cmd) | |
800 | _describemerge(ui, repo, mynode, fcd, fca, fco, env, toolpath, args) |
|
800 | _describemerge(ui, repo, mynode, fcd, fca, fco, env, toolpath, args) | |
801 | r = ui.system( |
|
801 | r = ui.system( | |
802 | cmd, cwd=repo.root, environ=env, blockedtag=b'mergetool' |
|
802 | cmd, cwd=repo.root, environ=env, blockedtag=b'mergetool' | |
803 | ) |
|
803 | ) | |
804 | else: |
|
804 | else: | |
805 | repo.ui.debug( |
|
805 | repo.ui.debug( | |
806 | b'launching python merge script: %s:%s\n' % (toolpath, scriptfn) |
|
806 | b'launching python merge script: %s:%s\n' % (toolpath, scriptfn) | |
807 | ) |
|
807 | ) | |
808 | r = 0 |
|
808 | r = 0 | |
809 | try: |
|
809 | try: | |
810 | # avoid cycle cmdutil->merge->filemerge->extensions->cmdutil |
|
810 | # avoid cycle cmdutil->merge->filemerge->extensions->cmdutil | |
811 | from . import extensions |
|
811 | from . import extensions | |
812 |
|
812 | |||
813 | mod = extensions.loadpath(toolpath, b'hgmerge.%s' % tool) |
|
813 | mod = extensions.loadpath(toolpath, b'hgmerge.%s' % tool) | |
814 | except Exception: |
|
814 | except Exception: | |
815 | raise error.Abort( |
|
815 | raise error.Abort( | |
816 | _(b"loading python merge script failed: %s") % toolpath |
|
816 | _(b"loading python merge script failed: %s") % toolpath | |
817 | ) |
|
817 | ) | |
818 | mergefn = getattr(mod, scriptfn, None) |
|
818 | mergefn = getattr(mod, scriptfn, None) | |
819 | if mergefn is None: |
|
819 | if mergefn is None: | |
820 | raise error.Abort( |
|
820 | raise error.Abort( | |
821 | _(b"%s does not have function: %s") % (toolpath, scriptfn) |
|
821 | _(b"%s does not have function: %s") % (toolpath, scriptfn) | |
822 | ) |
|
822 | ) | |
823 | argslist = procutil.shellsplit(args) |
|
823 | argslist = procutil.shellsplit(args) | |
824 | # avoid cycle cmdutil->merge->filemerge->hook->extensions->cmdutil |
|
824 | # avoid cycle cmdutil->merge->filemerge->hook->extensions->cmdutil | |
825 | from . import hook |
|
825 | from . import hook | |
826 |
|
826 | |||
827 | ret, raised = hook.pythonhook( |
|
827 | ret, raised = hook.pythonhook( | |
828 | ui, repo, b"merge", toolpath, mergefn, {b'args': argslist}, True |
|
828 | ui, repo, b"merge", toolpath, mergefn, {b'args': argslist}, True | |
829 | ) |
|
829 | ) | |
830 | if raised: |
|
830 | if raised: | |
831 | r = 1 |
|
831 | r = 1 | |
832 | repo.ui.debug(b'merge tool returned: %d\n' % r) |
|
832 | repo.ui.debug(b'merge tool returned: %d\n' % r) | |
833 | return True, r, False |
|
833 | return True, r, False | |
834 |
|
834 | |||
835 |
|
835 | |||
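For illustration, a minimal standalone sketch of the placeholder expansion performed above on a merge-tools args string. It mimics util.interpolate with the standard library only; the tool command and paths are made up.

import re
import shlex

def expand_tool_args(args, replace):
    # Substitute $local/$base/$other/$output with shell-quoted paths, the
    # way a configured merge-tools.<tool>.args value is expanded before the
    # external tool is launched.
    pattern = re.compile(r'\$(' + '|'.join(map(re.escape, replace)) + r')')
    return pattern.sub(lambda m: shlex.quote(replace[m.group(1)]), args)

paths = {
    'local': '/tmp/foo~local.txt',
    'base': '/tmp/foo~base.txt',
    'other': '/tmp/foo~other.txt',
    'output': '/repo/foo.txt',
}
print(expand_tool_args('merge $local $base $other -o $output', paths))
# merge /tmp/foo~local.txt /tmp/foo~base.txt /tmp/foo~other.txt -o /repo/foo.txt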
836 | def _populate_label_detail(input, template): |
|
836 | def _populate_label_detail(input, template): | |
837 | """Applies the given template to the ctx and stores it in the input.""" |
|
837 | """Applies the given template to the ctx and stores it in the input.""" | |
838 | ctx = input.fctx.changectx() |
|
838 | ctx = input.fctx.changectx() | |
839 | if ctx.node() is None: |
|
839 | if ctx.node() is None: | |
840 | ctx = ctx.p1() |
|
840 | ctx = ctx.p1() | |
841 |
|
841 | |||
842 | props = {b'ctx': ctx} |
|
842 | props = {b'ctx': ctx} | |
843 | templateresult = template.renderdefault(props) |
|
843 | templateresult = template.renderdefault(props) | |
844 | input.label_detail = templateresult.splitlines()[0] # split for safety |
|
844 | input.label_detail = templateresult.splitlines()[0] # split for safety | |
845 |
|
845 | |||
846 |
|
846 | |||
847 | def _populate_label_details(repo, inputs, tool=None): |
|
847 | def _populate_label_details(repo, inputs, tool=None): | |
848 | """Populates the label details using the conflict marker template.""" |
|
848 | """Populates the label details using the conflict marker template.""" | |
849 | ui = repo.ui |
|
849 | ui = repo.ui | |
850 | template = ui.config(b'command-templates', b'mergemarker') |
|
850 | template = ui.config(b'command-templates', b'mergemarker') | |
851 | if tool is not None: |
|
851 | if tool is not None: | |
852 | template = _toolstr(ui, tool, b'mergemarkertemplate', template) |
|
852 | template = _toolstr(ui, tool, b'mergemarkertemplate', template) | |
853 | template = templater.unquotestring(template) |
|
853 | template = templater.unquotestring(template) | |
854 | tres = formatter.templateresources(ui, repo) |
|
854 | tres = formatter.templateresources(ui, repo) | |
855 | tmpl = formatter.maketemplater( |
|
855 | tmpl = formatter.maketemplater( | |
856 | ui, template, defaults=templatekw.keywords, resources=tres |
|
856 | ui, template, defaults=templatekw.keywords, resources=tres | |
857 | ) |
|
857 | ) | |
858 |
|
858 | |||
859 | for input in inputs: |
|
859 | for input in inputs: | |
860 | _populate_label_detail(input, tmpl) |
|
860 | _populate_label_detail(input, tmpl) | |
861 |
|
861 | |||
862 |
|
862 | |||
863 | def partextras(labels): |
|
863 | def partextras(labels): | |
864 | """Return a dictionary of extra labels for use in prompts to the user |
|
864 | """Return a dictionary of extra labels for use in prompts to the user | |
865 |
|
865 | |||
866 | Intended use is in strings of the form "(l)ocal%(l)s". |
|
866 | Intended use is in strings of the form "(l)ocal%(l)s". | |
867 | """ |
|
867 | """ | |
868 | if labels is None: |
|
868 | if labels is None: | |
869 | return { |
|
869 | return { | |
870 | b"l": b"", |
|
870 | b"l": b"", | |
871 | b"o": b"", |
|
871 | b"o": b"", | |
872 | } |
|
872 | } | |
873 |
|
873 | |||
874 | return { |
|
874 | return { | |
875 | b"l": b" [%s]" % labels[0], |
|
875 | b"l": b" [%s]" % labels[0], | |
876 | b"o": b" [%s]" % labels[1], |
|
876 | b"o": b" [%s]" % labels[1], | |
877 | } |
|
877 | } | |
878 |
|
878 | |||
879 |
|
879 | |||
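A standalone sketch of the intended use described in the docstring above; the prompt text itself is made up, only the "(l)ocal%(l)s" pattern comes from the code.

extras = {b"l": b" [working copy]", b"o": b" [merge rev]"}
prompt = b"keep (l)ocal%(l)s or take (o)ther%(o)s?" % extras
print(prompt.decode())
# keep (l)ocal [working copy] or take (o)ther [merge rev]?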
880 | def _makebackup(repo, ui, wctx, fcd): |
|
880 | def _makebackup(repo, ui, wctx, fcd): | |
881 | """Makes and returns a filectx-like object for ``fcd``'s backup file. |
|
881 | """Makes and returns a filectx-like object for ``fcd``'s backup file. | |
882 |
|
882 | |||
883 | In addition to preserving the user's pre-existing modifications to `fcd` |
|
883 | In addition to preserving the user's pre-existing modifications to `fcd` | |
884 | (if any), the backup is used to undo certain premerges, confirm whether a |
|
884 | (if any), the backup is used to undo certain premerges, confirm whether a | |
885 | merge changed anything, and determine what line endings the new file should |
|
885 | merge changed anything, and determine what line endings the new file should | |
886 | have. |
|
886 | have. | |
887 |
|
887 | |||
888 | Backups only need to be written once since their content doesn't change |
|
888 | Backups only need to be written once since their content doesn't change | |
889 | afterwards. |
|
889 | afterwards. | |
890 | """ |
|
890 | """ | |
891 | if fcd.isabsent(): |
|
891 | if fcd.isabsent(): | |
892 | return None |
|
892 | return None | |
893 | # TODO: Break this import cycle somehow. (filectx -> ctx -> fileset -> |
|
893 | # TODO: Break this import cycle somehow. (filectx -> ctx -> fileset -> | |
894 | # merge -> filemerge). (I suspect the fileset import is the weakest link) |
|
894 | # merge -> filemerge). (I suspect the fileset import is the weakest link) | |
895 | from . import context |
|
895 | from . import context | |
896 |
|
896 | |||
897 | backup = scmutil.backuppath(ui, repo, fcd.path()) |
|
897 | backup = scmutil.backuppath(ui, repo, fcd.path()) | |
898 | inworkingdir = backup.startswith(repo.wvfs.base) and not backup.startswith( |
|
898 | inworkingdir = backup.startswith(repo.wvfs.base) and not backup.startswith( | |
899 | repo.vfs.base |
|
899 | repo.vfs.base | |
900 | ) |
|
900 | ) | |
901 | if isinstance(fcd, context.overlayworkingfilectx) and inworkingdir: |
|
901 | if isinstance(fcd, context.overlayworkingfilectx) and inworkingdir: | |
902 | # If the backup file is to be in the working directory, and we're |
|
902 | # If the backup file is to be in the working directory, and we're | |
903 | # merging in-memory, we must redirect the backup to the memory context |
|
903 | # merging in-memory, we must redirect the backup to the memory context | |
904 | # so we don't disturb the working directory. |
|
904 | # so we don't disturb the working directory. | |
905 | relpath = backup[len(repo.wvfs.base) + 1 :] |
|
905 | relpath = backup[len(repo.wvfs.base) + 1 :] | |
906 | wctx[relpath].write(fcd.data(), fcd.flags()) |
|
906 | wctx[relpath].write(fcd.data(), fcd.flags()) | |
907 | return wctx[relpath] |
|
907 | return wctx[relpath] | |
908 | else: |
|
908 | else: | |
909 | # Otherwise, write to whatever path the user specified the backups
|
909 | # Otherwise, write to whatever path the user specified the backups | 
910 | # should go. We still need to switch based on whether the source is |
|
910 | # should go. We still need to switch based on whether the source is | |
911 | # in-memory so we can use the fast path of ``util.copy`` if both are |
|
911 | # in-memory so we can use the fast path of ``util.copy`` if both are | |
912 | # on disk. |
|
912 | # on disk. | |
913 | if isinstance(fcd, context.overlayworkingfilectx): |
|
913 | if isinstance(fcd, context.overlayworkingfilectx): | |
914 | util.writefile(backup, fcd.data()) |
|
914 | util.writefile(backup, fcd.data()) | |
915 | else: |
|
915 | else: | |
916 | a = _workingpath(repo, fcd) |
|
916 | a = _workingpath(repo, fcd) | |
917 | util.copyfile(a, backup) |
|
917 | util.copyfile(a, backup) | |
918 | # An arbitraryfilectx is returned, so we can run the same functions on
|
918 | # An arbitraryfilectx is returned, so we can run the same functions on | 
919 | # the backup context regardless of where it lives. |
|
919 | # the backup context regardless of where it lives. | |
920 | return context.arbitraryfilectx(backup, repo=repo) |
|
920 | return context.arbitraryfilectx(backup, repo=repo) | |
921 |
|
921 | |||
922 |
|
922 | |||
923 | @contextlib.contextmanager |
|
923 | @contextlib.contextmanager | |
924 | def _maketempfiles(repo, fco, fca, localpath, uselocalpath): |
|
924 | def _maketempfiles(repo, fco, fca, localpath, uselocalpath): | |
925 | """Writes out `fco` and `fca` as temporary files, and (if uselocalpath) |
|
925 | """Writes out `fco` and `fca` as temporary files, and (if uselocalpath) | |
926 | copies `localpath` to another temporary file, so an external merge tool may |
|
926 | copies `localpath` to another temporary file, so an external merge tool may | |
927 | use them. |
|
927 | use them. | |
928 | """ |
|
928 | """ | |
929 | tmproot = None |
|
929 | tmproot = None | |
930 | tmprootprefix = repo.ui.config(b'experimental', b'mergetempdirprefix') |
|
930 | tmprootprefix = repo.ui.config(b'experimental', b'mergetempdirprefix') | |
931 | if tmprootprefix: |
|
931 | if tmprootprefix: | |
932 | tmproot = pycompat.mkdtemp(prefix=tmprootprefix) |
|
932 | tmproot = pycompat.mkdtemp(prefix=tmprootprefix) | |
933 |
|
933 | |||
934 | def maketempfrompath(prefix, path): |
|
934 | def maketempfrompath(prefix, path): | |
935 | fullbase, ext = os.path.splitext(path) |
|
935 | fullbase, ext = os.path.splitext(path) | |
936 | pre = b"%s~%s" % (os.path.basename(fullbase), prefix) |
|
936 | pre = b"%s~%s" % (os.path.basename(fullbase), prefix) | |
937 | if tmproot: |
|
937 | if tmproot: | |
938 | name = os.path.join(tmproot, pre) |
|
938 | name = os.path.join(tmproot, pre) | |
939 | if ext: |
|
939 | if ext: | |
940 | name += ext |
|
940 | name += ext | |
941 | f = open(name, "wb") |
|
941 | f = open(name, "wb") | |
942 | else: |
|
942 | else: | |
943 | fd, name = pycompat.mkstemp(prefix=pre + b'.', suffix=ext) |
|
943 | fd, name = pycompat.mkstemp(prefix=pre + b'.', suffix=ext) | |
944 | f = os.fdopen(fd, "wb") |
|
944 | f = os.fdopen(fd, "wb") | |
945 | return f, name |
|
945 | return f, name | |
946 |
|
946 | |||
947 | def tempfromcontext(prefix, ctx): |
|
947 | def tempfromcontext(prefix, ctx): | |
948 | f, name = maketempfrompath(prefix, ctx.path()) |
|
948 | f, name = maketempfrompath(prefix, ctx.path()) | |
949 | data = ctx.decodeddata() |
|
949 | data = ctx.decodeddata() | |
950 | f.write(data) |
|
950 | f.write(data) | |
951 | f.close() |
|
951 | f.close() | |
952 | return name |
|
952 | return name | |
953 |
|
953 | |||
954 | b = tempfromcontext(b"base", fca) |
|
954 | b = tempfromcontext(b"base", fca) | |
955 | c = tempfromcontext(b"other", fco) |
|
955 | c = tempfromcontext(b"other", fco) | |
956 | d = localpath |
|
956 | d = localpath | |
957 | if uselocalpath: |
|
957 | if uselocalpath: | |
958 | # We start off with this being the backup filename, so remove the .orig |
|
958 | # We start off with this being the backup filename, so remove the .orig | |
959 | # to make syntax-highlighting more likely. |
|
959 | # to make syntax-highlighting more likely. | |
960 | if d.endswith(b'.orig'): |
|
960 | if d.endswith(b'.orig'): | |
961 | d, _ = os.path.splitext(d) |
|
961 | d, _ = os.path.splitext(d) | |
962 | f, d = maketempfrompath(b"local", d) |
|
962 | f, d = maketempfrompath(b"local", d) | |
963 | with open(localpath, b'rb') as src: |
|
963 | with open(localpath, b'rb') as src: | |
964 | f.write(src.read()) |
|
964 | f.write(src.read()) | |
965 | f.close() |
|
965 | f.close() | |
966 |
|
966 | |||
967 | try: |
|
967 | try: | |
968 | yield b, c, d |
|
968 | yield b, c, d | |
969 | finally: |
|
969 | finally: | |
970 | if tmproot: |
|
970 | if tmproot: | |
971 | shutil.rmtree(tmproot) |
|
971 | shutil.rmtree(tmproot) | |
972 | else: |
|
972 | else: | |
973 | util.unlink(b) |
|
973 | util.unlink(b) | |
974 | util.unlink(c) |
|
974 | util.unlink(c) | |
975 | # if not uselocalpath, d is the 'orig'/backup file which we |
|
975 | # if not uselocalpath, d is the 'orig'/backup file which we | |
976 | # shouldn't delete. |
|
976 | # shouldn't delete. | |
977 | if d and uselocalpath: |
|
977 | if d and uselocalpath: | |
978 | util.unlink(d) |
|
978 | util.unlink(d) | |
979 |
|
979 | |||
980 |
|
980 | |||
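The cleanup guarantee above generalizes to a small standard-library pattern; this standalone sketch (names and contents hypothetical) has the same shape: yield temporary copies to the caller and remove them in the finally block.

import contextlib
import os
import tempfile

@contextlib.contextmanager
def temp_copies(contents):
    # contents maps a name prefix to the bytes to write out for the tool.
    paths = []
    try:
        for name, data in contents.items():
            fd, path = tempfile.mkstemp(prefix=name + '.')
            with os.fdopen(fd, 'wb') as f:
                f.write(data)
            paths.append(path)
        yield paths
    finally:
        for path in paths:
            os.unlink(path)

with temp_copies({'base': b'a\n', 'other': b'b\n'}) as (basepath, otherpath):
    print(basepath, otherpath)  # hand these paths to an external merge tool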
981 | def filemerge(repo, wctx, mynode, orig, fcd, fco, fca, labels=None): |
|
981 | def filemerge(repo, wctx, mynode, orig, fcd, fco, fca, labels=None): | |
982 | """perform a 3-way merge in the working directory |
|
982 | """perform a 3-way merge in the working directory | |
983 |
|
983 | |||
984 | mynode = parent node before merge |
|
984 | mynode = parent node before merge | |
985 | orig = original local filename before merge |
|
985 | orig = original local filename before merge | |
986 | fco = other file context |
|
986 | fco = other file context | |
987 | fca = ancestor file context |
|
987 | fca = ancestor file context | |
988 | fcd = local file context for current/destination file |
|
988 | fcd = local file context for current/destination file | |
989 |
|
989 | |||
990 | Returns whether the merge is complete, the return value of the merge, and |
|
990 | Returns whether the merge is complete, the return value of the merge, and | |
991 | a boolean indicating whether the file was deleted from disk.""" |
|
991 | a boolean indicating whether the file was deleted from disk.""" | |
992 |
|
||||
993 | if not fco.cmp(fcd): # files identical? |
|
|||
994 | return None, False |
|
|||
995 |
|
||||
996 | ui = repo.ui |
|
992 | ui = repo.ui | |
997 | fd = fcd.path() |
|
993 | fd = fcd.path() | |
998 | uipathfn = scmutil.getuipathfn(repo) |
|
994 | uipathfn = scmutil.getuipathfn(repo) | |
999 | fduipath = uipathfn(fd) |
|
995 | fduipath = uipathfn(fd) | |
1000 | binary = fcd.isbinary() or fco.isbinary() or fca.isbinary() |
|
996 | binary = fcd.isbinary() or fco.isbinary() or fca.isbinary() | |
1001 | symlink = b'l' in fcd.flags() + fco.flags() |
|
997 | symlink = b'l' in fcd.flags() + fco.flags() | |
1002 | changedelete = fcd.isabsent() or fco.isabsent() |
|
998 | changedelete = fcd.isabsent() or fco.isabsent() | |
1003 | tool, toolpath = _picktool(repo, ui, fd, binary, symlink, changedelete) |
|
999 | tool, toolpath = _picktool(repo, ui, fd, binary, symlink, changedelete) | |
1004 | scriptfn = None |
|
1000 | scriptfn = None | |
1005 | if tool in internals and tool.startswith(b'internal:'): |
|
1001 | if tool in internals and tool.startswith(b'internal:'): | |
1006 | # normalize to new-style names (':merge' etc) |
|
1002 | # normalize to new-style names (':merge' etc) | |
1007 | tool = tool[len(b'internal') :] |
|
1003 | tool = tool[len(b'internal') :] | |
1008 | if toolpath and toolpath.startswith(b'python:'): |
|
1004 | if toolpath and toolpath.startswith(b'python:'): | |
1009 | invalidsyntax = False |
|
1005 | invalidsyntax = False | |
1010 | if toolpath.count(b':') >= 2: |
|
1006 | if toolpath.count(b':') >= 2: | |
1011 | script, scriptfn = toolpath[7:].rsplit(b':', 1) |
|
1007 | script, scriptfn = toolpath[7:].rsplit(b':', 1) | |
1012 | if not scriptfn: |
|
1008 | if not scriptfn: | |
1013 | invalidsyntax = True |
|
1009 | invalidsyntax = True | |
1014 | # missing :callable can lead to splitting on Windows drive letter
|
1010 | # missing :callable can lead to splitting on Windows drive letter | 
1015 | if b'\\' in scriptfn or b'/' in scriptfn: |
|
1011 | if b'\\' in scriptfn or b'/' in scriptfn: | |
1016 | invalidsyntax = True |
|
1012 | invalidsyntax = True | |
1017 | else: |
|
1013 | else: | |
1018 | invalidsyntax = True |
|
1014 | invalidsyntax = True | |
1019 | if invalidsyntax: |
|
1015 | if invalidsyntax: | |
1020 | raise error.Abort(_(b"invalid 'python:' syntax: %s") % toolpath) |
|
1016 | raise error.Abort(_(b"invalid 'python:' syntax: %s") % toolpath) | |
1021 | toolpath = script |
|
1017 | toolpath = script | |
1022 | ui.debug( |
|
1018 | ui.debug( | |
1023 | b"picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n" |
|
1019 | b"picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n" | |
1024 | % ( |
|
1020 | % ( | |
1025 | tool, |
|
1021 | tool, | |
1026 | fduipath, |
|
1022 | fduipath, | |
1027 | pycompat.bytestr(binary), |
|
1023 | pycompat.bytestr(binary), | |
1028 | pycompat.bytestr(symlink), |
|
1024 | pycompat.bytestr(symlink), | |
1029 | pycompat.bytestr(changedelete), |
|
1025 | pycompat.bytestr(changedelete), | |
1030 | ) |
|
1026 | ) | |
1031 | ) |
|
1027 | ) | |
1032 |
|
1028 | |||
1033 | if tool in internals: |
|
1029 | if tool in internals: | |
1034 | func = internals[tool] |
|
1030 | func = internals[tool] | |
1035 | mergetype = func.mergetype |
|
1031 | mergetype = func.mergetype | |
1036 | onfailure = func.onfailure |
|
1032 | onfailure = func.onfailure | |
1037 | precheck = func.precheck |
|
1033 | precheck = func.precheck | |
1038 | isexternal = False |
|
1034 | isexternal = False | |
1039 | else: |
|
1035 | else: | |
1040 | if wctx.isinmemory(): |
|
1036 | if wctx.isinmemory(): | |
1041 | func = _xmergeimm |
|
1037 | func = _xmergeimm | |
1042 | else: |
|
1038 | else: | |
1043 | func = _xmerge |
|
1039 | func = _xmerge | |
1044 | mergetype = fullmerge |
|
1040 | mergetype = fullmerge | |
1045 | onfailure = _(b"merging %s failed!\n") |
|
1041 | onfailure = _(b"merging %s failed!\n") | |
1046 | precheck = None |
|
1042 | precheck = None | |
1047 | isexternal = True |
|
1043 | isexternal = True | |
1048 |
|
1044 | |||
1049 | toolconf = tool, toolpath, binary, symlink, scriptfn |
|
1045 | toolconf = tool, toolpath, binary, symlink, scriptfn | |
1050 |
|
1046 | |||
1051 | if not labels: |
|
1047 | if not labels: | |
1052 | labels = [b'local', b'other'] |
|
1048 | labels = [b'local', b'other'] | |
1053 | if len(labels) < 3: |
|
1049 | if len(labels) < 3: | |
1054 | labels.append(b'base') |
|
1050 | labels.append(b'base') | |
1055 | local = simplemerge.MergeInput(fcd, labels[0]) |
|
1051 | local = simplemerge.MergeInput(fcd, labels[0]) | |
1056 | other = simplemerge.MergeInput(fco, labels[1]) |
|
1052 | other = simplemerge.MergeInput(fco, labels[1]) | |
1057 | base = simplemerge.MergeInput(fca, labels[2]) |
|
1053 | base = simplemerge.MergeInput(fca, labels[2]) | |
1058 | if mergetype == nomerge: |
|
1054 | if mergetype == nomerge: | |
1059 | return func( |
|
1055 | return func( | |
1060 | repo, |
|
1056 | repo, | |
1061 | mynode, |
|
1057 | mynode, | |
1062 | local, |
|
1058 | local, | |
1063 | other, |
|
1059 | other, | |
1064 | base, |
|
1060 | base, | |
1065 | toolconf, |
|
1061 | toolconf, | |
1066 | ) |
|
1062 | ) | |
1067 |
|
1063 | |||
1068 | if orig != fco.path(): |
|
1064 | if orig != fco.path(): | |
1069 | ui.status( |
|
1065 | ui.status( | |
1070 | _(b"merging %s and %s to %s\n") |
|
1066 | _(b"merging %s and %s to %s\n") | |
1071 | % (uipathfn(orig), uipathfn(fco.path()), fduipath) |
|
1067 | % (uipathfn(orig), uipathfn(fco.path()), fduipath) | |
1072 | ) |
|
1068 | ) | |
1073 | else: |
|
1069 | else: | |
1074 | ui.status(_(b"merging %s\n") % fduipath) |
|
1070 | ui.status(_(b"merging %s\n") % fduipath) | |
1075 |
|
1071 | |||
1076 | ui.debug(b"my %s other %s ancestor %s\n" % (fcd, fco, fca)) |
|
1072 | ui.debug(b"my %s other %s ancestor %s\n" % (fcd, fco, fca)) | |
1077 |
|
1073 | |||
1078 | if precheck and not precheck(repo, mynode, fcd, fco, fca, toolconf): |
|
1074 | if precheck and not precheck(repo, mynode, fcd, fco, fca, toolconf): | |
1079 | if onfailure: |
|
1075 | if onfailure: | |
1080 | if wctx.isinmemory(): |
|
1076 | if wctx.isinmemory(): | |
1081 | raise error.InMemoryMergeConflictsError( |
|
1077 | raise error.InMemoryMergeConflictsError( | |
1082 | b'in-memory merge does not support merge conflicts' |
|
1078 | b'in-memory merge does not support merge conflicts' | |
1083 | ) |
|
1079 | ) | |
1084 | ui.warn(onfailure % fduipath) |
|
1080 | ui.warn(onfailure % fduipath) | |
1085 | return 1, False |
|
1081 | return 1, False | |
1086 |
|
1082 | |||
1087 | backup = _makebackup(repo, ui, wctx, fcd) |
|
1083 | backup = _makebackup(repo, ui, wctx, fcd) | |
1088 | r = 1 |
|
1084 | r = 1 | |
1089 | try: |
|
1085 | try: | |
1090 | internalmarkerstyle = ui.config(b'ui', b'mergemarkers') |
|
1086 | internalmarkerstyle = ui.config(b'ui', b'mergemarkers') | |
1091 | if isexternal: |
|
1087 | if isexternal: | |
1092 | markerstyle = _toolstr(ui, tool, b'mergemarkers') |
|
1088 | markerstyle = _toolstr(ui, tool, b'mergemarkers') | |
1093 | else: |
|
1089 | else: | |
1094 | markerstyle = internalmarkerstyle |
|
1090 | markerstyle = internalmarkerstyle | |
1095 |
|
1091 | |||
1096 | if mergetype == fullmerge: |
|
1092 | if mergetype == fullmerge: | |
1097 | # conflict markers generated by premerge will use 'detailed' |
|
1093 | # conflict markers generated by premerge will use 'detailed' | |
1098 | # settings if either ui.mergemarkers or the tool's mergemarkers |
|
1094 | # settings if either ui.mergemarkers or the tool's mergemarkers | |
1099 | # setting is 'detailed'. This way tools can have basic labels in |
|
1095 | # setting is 'detailed'. This way tools can have basic labels in | |
1100 | # space-constrained areas of the UI, but still get full information |
|
1096 | # space-constrained areas of the UI, but still get full information | |
1101 | # in conflict markers if premerge is 'keep' or 'keep-merge3'. |
|
1097 | # in conflict markers if premerge is 'keep' or 'keep-merge3'. | |
1102 | labeltool = None |
|
1098 | labeltool = None | |
1103 | if markerstyle != b'basic': |
|
1099 | if markerstyle != b'basic': | |
1104 | # respect 'tool's mergemarkertemplate (which defaults to |
|
1100 | # respect 'tool's mergemarkertemplate (which defaults to | |
1105 | # command-templates.mergemarker) |
|
1101 | # command-templates.mergemarker) | |
1106 | labeltool = tool |
|
1102 | labeltool = tool | |
1107 | if internalmarkerstyle != b'basic' or markerstyle != b'basic': |
|
1103 | if internalmarkerstyle != b'basic' or markerstyle != b'basic': | |
1108 | _populate_label_details( |
|
1104 | _populate_label_details( | |
1109 | repo, [local, other, base], tool=labeltool |
|
1105 | repo, [local, other, base], tool=labeltool | |
1110 | ) |
|
1106 | ) | |
1111 |
|
1107 | |||
1112 | r = _premerge( |
|
1108 | r = _premerge( | |
1113 | repo, |
|
1109 | repo, | |
1114 | local, |
|
1110 | local, | |
1115 | other, |
|
1111 | other, | |
1116 | base, |
|
1112 | base, | |
1117 | toolconf, |
|
1113 | toolconf, | |
1118 | ) |
|
1114 | ) | |
1119 | # we're done if premerge was successful (r is 0) |
|
1115 | # we're done if premerge was successful (r is 0) | |
1120 | if not r: |
|
1116 | if not r: | |
1121 | return r, False |
|
1117 | return r, False | |
1122 |
|
1118 | |||
1123 | # Reset to basic labels |
|
1119 | # Reset to basic labels | |
1124 | local.label_detail = None |
|
1120 | local.label_detail = None | |
1125 | other.label_detail = None |
|
1121 | other.label_detail = None | |
1126 | base.label_detail = None |
|
1122 | base.label_detail = None | |
1127 |
|
1123 | |||
1128 | if markerstyle != b'basic': |
|
1124 | if markerstyle != b'basic': | |
1129 | _populate_label_details(repo, [local, other, base], tool=tool) |
|
1125 | _populate_label_details(repo, [local, other, base], tool=tool) | |
1130 |
|
1126 | |||
1131 | needcheck, r, deleted = func( |
|
1127 | needcheck, r, deleted = func( | |
1132 | repo, |
|
1128 | repo, | |
1133 | mynode, |
|
1129 | mynode, | |
1134 | local, |
|
1130 | local, | |
1135 | other, |
|
1131 | other, | |
1136 | base, |
|
1132 | base, | |
1137 | toolconf, |
|
1133 | toolconf, | |
1138 | backup, |
|
1134 | backup, | |
1139 | ) |
|
1135 | ) | |
1140 |
|
1136 | |||
1141 | if needcheck: |
|
1137 | if needcheck: | |
1142 | r = _check(repo, r, ui, tool, fcd, backup) |
|
1138 | r = _check(repo, r, ui, tool, fcd, backup) | |
1143 |
|
1139 | |||
1144 | if r: |
|
1140 | if r: | |
1145 | if onfailure: |
|
1141 | if onfailure: | |
1146 | if wctx.isinmemory(): |
|
1142 | if wctx.isinmemory(): | |
1147 | raise error.InMemoryMergeConflictsError( |
|
1143 | raise error.InMemoryMergeConflictsError( | |
1148 | b'in-memory merge ' |
|
1144 | b'in-memory merge ' | |
1149 | b'does not support ' |
|
1145 | b'does not support ' | |
1150 | b'merge conflicts' |
|
1146 | b'merge conflicts' | |
1151 | ) |
|
1147 | ) | |
1152 | ui.warn(onfailure % fduipath) |
|
1148 | ui.warn(onfailure % fduipath) | |
1153 | _onfilemergefailure(ui) |
|
1149 | _onfilemergefailure(ui) | |
1154 |
|
1150 | |||
1155 | return r, deleted |
|
1151 | return r, deleted | |
1156 | finally: |
|
1152 | finally: | |
1157 | if not r and backup is not None: |
|
1153 | if not r and backup is not None: | |
1158 | backup.remove() |
|
1154 | backup.remove() | |
1159 |
|
1155 | |||
1160 |
|
1156 | |||
1161 | def _haltmerge(): |
|
1157 | def _haltmerge(): | |
1162 | msg = _(b'merge halted after failed merge (see hg resolve)') |
|
1158 | msg = _(b'merge halted after failed merge (see hg resolve)') | |
1163 | raise error.InterventionRequired(msg) |
|
1159 | raise error.InterventionRequired(msg) | |
1164 |
|
1160 | |||
1165 |
|
1161 | |||
1166 | def _onfilemergefailure(ui): |
|
1162 | def _onfilemergefailure(ui): | |
1167 | action = ui.config(b'merge', b'on-failure') |
|
1163 | action = ui.config(b'merge', b'on-failure') | |
1168 | if action == b'prompt': |
|
1164 | if action == b'prompt': | |
1169 | msg = _(b'continue merge operation (yn)?$$ &Yes $$ &No') |
|
1165 | msg = _(b'continue merge operation (yn)?$$ &Yes $$ &No') | |
1170 | if ui.promptchoice(msg, 0) == 1: |
|
1166 | if ui.promptchoice(msg, 0) == 1: | |
1171 | _haltmerge() |
|
1167 | _haltmerge() | |
1172 | if action == b'halt': |
|
1168 | if action == b'halt': | |
1173 | _haltmerge() |
|
1169 | _haltmerge() | |
1174 | # default action is 'continue', in which case we neither prompt nor halt |
|
1170 | # default action is 'continue', in which case we neither prompt nor halt | |
1175 |
|
1171 | |||
1176 |
|
1172 | |||
1177 | def hasconflictmarkers(data): |
|
1173 | def hasconflictmarkers(data): | |
1178 | # Detect lines starting with a string of 7 identical characters from the |
|
1174 | # Detect lines starting with a string of 7 identical characters from the | |
1179 | # subset Mercurial uses for conflict markers, followed by either the end of |
|
1175 | # subset Mercurial uses for conflict markers, followed by either the end of | |
1180 | # line or a space and some text. Note that using [<>=+|-]{7} would detect |
|
1176 | # line or a space and some text. Note that using [<>=+|-]{7} would detect | |
1181 | # `<><><><><` as a conflict marker, which we don't want. |
|
1177 | # `<><><><><` as a conflict marker, which we don't want. | |
1182 | return bool( |
|
1178 | return bool( | |
1183 | re.search( |
|
1179 | re.search( | |
1184 | br"^([<>=+|-])\1{6}( .*)$", |
|
1180 | br"^([<>=+|-])\1{6}( .*)$", | |
1185 | data, |
|
1181 | data, | |
1186 | re.MULTILINE, |
|
1182 | re.MULTILINE, | |
1187 | ) |
|
1183 | ) | |
1188 | ) |
|
1184 | ) | |
1189 |
|
1185 | |||
1190 |
|
1186 | |||
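A quick standalone check of the marker pattern used above, with made-up input:

import re

marker_re = re.compile(br"^([<>=+|-])\1{6}( .*)$", re.MULTILINE)

conflicted = b"<<<<<<< local\nx\n=======\ny\n>>>>>>> other\n"
assert marker_re.search(conflicted)               # '<<<<<<< local' matches
assert not marker_re.search(b"<><><><>< spam\n")  # alternating chars do not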
1191 | def _check(repo, r, ui, tool, fcd, backup): |
|
1187 | def _check(repo, r, ui, tool, fcd, backup): | |
1192 | fd = fcd.path() |
|
1188 | fd = fcd.path() | |
1193 | uipathfn = scmutil.getuipathfn(repo) |
|
1189 | uipathfn = scmutil.getuipathfn(repo) | |
1194 |
|
1190 | |||
1195 | if not r and ( |
|
1191 | if not r and ( | |
1196 | _toolbool(ui, tool, b"checkconflicts") |
|
1192 | _toolbool(ui, tool, b"checkconflicts") | |
1197 | or b'conflicts' in _toollist(ui, tool, b"check") |
|
1193 | or b'conflicts' in _toollist(ui, tool, b"check") | |
1198 | ): |
|
1194 | ): | |
1199 | if hasconflictmarkers(fcd.data()): |
|
1195 | if hasconflictmarkers(fcd.data()): | |
1200 | r = 1 |
|
1196 | r = 1 | |
1201 |
|
1197 | |||
1202 | checked = False |
|
1198 | checked = False | |
1203 | if b'prompt' in _toollist(ui, tool, b"check"): |
|
1199 | if b'prompt' in _toollist(ui, tool, b"check"): | |
1204 | checked = True |
|
1200 | checked = True | |
1205 | if ui.promptchoice( |
|
1201 | if ui.promptchoice( | |
1206 | _(b"was merge of '%s' successful (yn)?$$ &Yes $$ &No") |
|
1202 | _(b"was merge of '%s' successful (yn)?$$ &Yes $$ &No") | |
1207 | % uipathfn(fd), |
|
1203 | % uipathfn(fd), | |
1208 | 1, |
|
1204 | 1, | |
1209 | ): |
|
1205 | ): | |
1210 | r = 1 |
|
1206 | r = 1 | |
1211 |
|
1207 | |||
1212 | if ( |
|
1208 | if ( | |
1213 | not r |
|
1209 | not r | |
1214 | and not checked |
|
1210 | and not checked | |
1215 | and ( |
|
1211 | and ( | |
1216 | _toolbool(ui, tool, b"checkchanged") |
|
1212 | _toolbool(ui, tool, b"checkchanged") | |
1217 | or b'changed' in _toollist(ui, tool, b"check") |
|
1213 | or b'changed' in _toollist(ui, tool, b"check") | |
1218 | ) |
|
1214 | ) | |
1219 | ): |
|
1215 | ): | |
1220 | if backup is not None and not fcd.cmp(backup): |
|
1216 | if backup is not None and not fcd.cmp(backup): | |
1221 | if ui.promptchoice( |
|
1217 | if ui.promptchoice( | |
1222 | _( |
|
1218 | _( | |
1223 | b" output file %s appears unchanged\n" |
|
1219 | b" output file %s appears unchanged\n" | |
1224 | b"was merge successful (yn)?" |
|
1220 | b"was merge successful (yn)?" | |
1225 | b"$$ &Yes $$ &No" |
|
1221 | b"$$ &Yes $$ &No" | |
1226 | ) |
|
1222 | ) | |
1227 | % uipathfn(fd), |
|
1223 | % uipathfn(fd), | |
1228 | 1, |
|
1224 | 1, | |
1229 | ): |
|
1225 | ): | |
1230 | r = 1 |
|
1226 | r = 1 | |
1231 |
|
1227 | |||
1232 | if backup is not None and _toolbool(ui, tool, b"fixeol"): |
|
1228 | if backup is not None and _toolbool(ui, tool, b"fixeol"): | |
1233 | _matcheol(_workingpath(repo, fcd), backup) |
|
1229 | _matcheol(_workingpath(repo, fcd), backup) | |
1234 |
|
1230 | |||
1235 | return r |
|
1231 | return r | |
1236 |
|
1232 | |||
1237 |
|
1233 | |||
1238 | def _workingpath(repo, ctx): |
|
1234 | def _workingpath(repo, ctx): | |
1239 | return repo.wjoin(ctx.path()) |
|
1235 | return repo.wjoin(ctx.path()) | |
1240 |
|
1236 | |||
1241 |
|
1237 | |||
1242 | def loadinternalmerge(ui, extname, registrarobj): |
|
1238 | def loadinternalmerge(ui, extname, registrarobj): | |
1243 | """Load internal merge tool from specified registrarobj""" |
|
1239 | """Load internal merge tool from specified registrarobj""" | |
1244 | for name, func in pycompat.iteritems(registrarobj._table): |
|
1240 | for name, func in pycompat.iteritems(registrarobj._table): | |
1245 | fullname = b':' + name |
|
1241 | fullname = b':' + name | |
1246 | internals[fullname] = func |
|
1242 | internals[fullname] = func | |
1247 | internals[b'internal:' + name] = func |
|
1243 | internals[b'internal:' + name] = func | |
1248 | internalsdoc[fullname] = func |
|
1244 | internalsdoc[fullname] = func | |
1249 |
|
1245 | |||
1250 | capabilities = sorted([k for k, v in func.capabilities.items() if v]) |
|
1246 | capabilities = sorted([k for k, v in func.capabilities.items() if v]) | |
1251 | if capabilities: |
|
1247 | if capabilities: | |
1252 | capdesc = b" (actual capabilities: %s)" % b', '.join( |
|
1248 | capdesc = b" (actual capabilities: %s)" % b', '.join( | |
1253 | capabilities |
|
1249 | capabilities | |
1254 | ) |
|
1250 | ) | |
1255 | func.__doc__ = func.__doc__ + pycompat.sysstr(b"\n\n%s" % capdesc) |
|
1251 | func.__doc__ = func.__doc__ + pycompat.sysstr(b"\n\n%s" % capdesc) | |
1256 |
|
1252 | |||
1257 | # to put i18n comments into hg.pot for automatically generated texts |
|
1253 | # to put i18n comments into hg.pot for automatically generated texts | |
1258 |
|
1254 | |||
1259 | # i18n: "binary" and "symlink" are keywords |
|
1255 | # i18n: "binary" and "symlink" are keywords | |
1260 | # i18n: this text is added automatically |
|
1256 | # i18n: this text is added automatically | |
1261 | _(b" (actual capabilities: binary, symlink)") |
|
1257 | _(b" (actual capabilities: binary, symlink)") | |
1262 | # i18n: "binary" is keyword |
|
1258 | # i18n: "binary" is keyword | |
1263 | # i18n: this text is added automatically |
|
1259 | # i18n: this text is added automatically | |
1264 | _(b" (actual capabilities: binary)") |
|
1260 | _(b" (actual capabilities: binary)") | |
1265 | # i18n: "symlink" is keyword |
|
1261 | # i18n: "symlink" is keyword | |
1266 | # i18n: this text is added automatically |
|
1262 | # i18n: this text is added automatically | |
1267 | _(b" (actual capabilities: symlink)") |
|
1263 | _(b" (actual capabilities: symlink)") | |
1268 |
|
1264 | |||
1269 |
|
1265 | |||
1270 | # load built-in merge tools explicitly to setup internalsdoc |
|
1266 | # load built-in merge tools explicitly to setup internalsdoc | |
1271 | loadinternalmerge(None, None, internaltool) |
|
1267 | loadinternalmerge(None, None, internaltool) | |
1272 |
|
1268 | |||
1273 | # tell hggettext to extract docstrings from these functions: |
|
1269 | # tell hggettext to extract docstrings from these functions: | |
1274 | i18nfunctions = internals.values() |
|
1270 | i18nfunctions = internals.values() |
@@ -1,914 +1,916 b'' | |||||
1 | from __future__ import absolute_import |
|
1 | from __future__ import absolute_import | |
2 |
|
2 | |||
3 | import collections |
|
3 | import collections | |
4 | import errno |
|
4 | import errno | |
5 | import shutil |
|
5 | import shutil | |
6 | import struct |
|
6 | import struct | |
7 | import weakref |
|
7 | import weakref | |
8 |
|
8 | |||
9 | from .i18n import _ |
|
9 | from .i18n import _ | |
10 | from .node import ( |
|
10 | from .node import ( | |
11 | bin, |
|
11 | bin, | |
12 | hex, |
|
12 | hex, | |
13 | nullrev, |
|
13 | nullrev, | |
14 | ) |
|
14 | ) | |
15 | from . import ( |
|
15 | from . import ( | |
16 | error, |
|
16 | error, | |
17 | filemerge, |
|
17 | filemerge, | |
18 | pycompat, |
|
18 | pycompat, | |
19 | util, |
|
19 | util, | |
20 | ) |
|
20 | ) | |
21 | from .utils import hashutil |
|
21 | from .utils import hashutil | |
22 |
|
22 | |||
23 | _pack = struct.pack |
|
23 | _pack = struct.pack | |
24 | _unpack = struct.unpack |
|
24 | _unpack = struct.unpack | |
25 |
|
25 | |||
26 |
|
26 | |||
27 | def _droponode(data): |
|
27 | def _droponode(data): | |
28 | # used for compatibility for v1 |
|
28 | # used for compatibility for v1 | |
29 | bits = data.split(b'\0') |
|
29 | bits = data.split(b'\0') | |
30 | bits = bits[:-2] + bits[-1:] |
|
30 | bits = bits[:-2] + bits[-1:] | |
31 | return b'\0'.join(bits) |
|
31 | return b'\0'.join(bits) | |
32 |
|
32 | |||
33 |
|
33 | |||
34 | def _filectxorabsent(hexnode, ctx, f): |
|
34 | def _filectxorabsent(hexnode, ctx, f): | |
35 | if hexnode == ctx.repo().nodeconstants.nullhex: |
|
35 | if hexnode == ctx.repo().nodeconstants.nullhex: | |
36 | return filemerge.absentfilectx(ctx, f) |
|
36 | return filemerge.absentfilectx(ctx, f) | |
37 | else: |
|
37 | else: | |
38 | return ctx[f] |
|
38 | return ctx[f] | |
39 |
|
39 | |||
40 |
|
40 | |||
41 | # Merge state record types. See ``mergestate`` docs for more. |
|
41 | # Merge state record types. See ``mergestate`` docs for more. | |
42 |
|
42 | |||
43 | #### |
|
43 | #### | |
44 | # merge records which record metadata about a current merge
|
44 | # merge records which record metadata about a current merge | 
45 | # each exists only once in a mergestate
|
45 | # each exists only once in a mergestate | 
46 | ##### |
|
46 | ##### | |
47 | RECORD_LOCAL = b'L' |
|
47 | RECORD_LOCAL = b'L' | |
48 | RECORD_OTHER = b'O' |
|
48 | RECORD_OTHER = b'O' | |
49 | # record merge labels |
|
49 | # record merge labels | |
50 | RECORD_LABELS = b'l' |
|
50 | RECORD_LABELS = b'l' | |
51 |
|
51 | |||
52 | ##### |
|
52 | ##### | |
53 | # record extra information about files, with one entry containing info about one |
|
53 | # record extra information about files, with one entry containing info about one | |
54 | # file. Hence, multiple of them can exist
|
54 | # file. Hence, multiple of them can exist | 
55 | ##### |
|
55 | ##### | |
56 | RECORD_FILE_VALUES = b'f' |
|
56 | RECORD_FILE_VALUES = b'f' | |
57 |
|
57 | |||
58 | ##### |
|
58 | ##### | |
59 | # merge records which represent the state of individual merges of files/folders
|
59 | # merge records which represent the state of individual merges of files/folders | 
60 | # These are top level records for each entry containing merge related info. |
|
60 | # These are top level records for each entry containing merge related info. | |
61 | # Each record of these has info about one file. Hence multiple of them can |
|
61 | # Each record of these has info about one file. Hence multiple of them can | |
62 | # exist
|
62 | # exist | 
63 | ##### |
|
63 | ##### | |
64 | RECORD_MERGED = b'F' |
|
64 | RECORD_MERGED = b'F' | |
65 | RECORD_CHANGEDELETE_CONFLICT = b'C' |
|
65 | RECORD_CHANGEDELETE_CONFLICT = b'C' | |
66 | # the path was dir on one side of merge and file on another |
|
66 | # the path was dir on one side of merge and file on another | |
67 | RECORD_PATH_CONFLICT = b'P' |
|
67 | RECORD_PATH_CONFLICT = b'P' | |
68 |
|
68 | |||
69 | ##### |
|
69 | ##### | |
70 | # possible state which a merge entry can have. These are stored inside top-level |
|
70 | # possible state which a merge entry can have. These are stored inside top-level | |
71 | # merge records mentioned just above. |
|
71 | # merge records mentioned just above. | |
72 | ##### |
|
72 | ##### | |
73 | MERGE_RECORD_UNRESOLVED = b'u' |
|
73 | MERGE_RECORD_UNRESOLVED = b'u' | |
74 | MERGE_RECORD_RESOLVED = b'r' |
|
74 | MERGE_RECORD_RESOLVED = b'r' | |
75 | MERGE_RECORD_UNRESOLVED_PATH = b'pu' |
|
75 | MERGE_RECORD_UNRESOLVED_PATH = b'pu' | |
76 | MERGE_RECORD_RESOLVED_PATH = b'pr' |
|
76 | MERGE_RECORD_RESOLVED_PATH = b'pr' | |
77 | # represents that the file was automatically merged in favor |
|
77 | # represents that the file was automatically merged in favor | |
78 | # of other version. This info is used on commit. |
|
78 | # of other version. This info is used on commit. | |
79 | # This is now deprecated and commit related information is now |
|
79 | # This is now deprecated and commit related information is now | |
80 | # stored in RECORD_FILE_VALUES |
|
80 | # stored in RECORD_FILE_VALUES | |
81 | MERGE_RECORD_MERGED_OTHER = b'o' |
|
81 | MERGE_RECORD_MERGED_OTHER = b'o' | |
82 |
|
82 | |||
83 | ##### |
|
83 | ##### | |
84 | # top level record which stores other unknown records. Multiple of these can |
|
84 | # top level record which stores other unknown records. Multiple of these can | |
85 | # exist
|
85 | # exist | 
86 | ##### |
|
86 | ##### | |
87 | RECORD_OVERRIDE = b't' |
|
87 | RECORD_OVERRIDE = b't' | |
88 |
|
88 | |||
89 | ##### |
|
89 | ##### | |
90 | # legacy records which are no longer used but kept to prevent breaking BC |
|
90 | # legacy records which are no longer used but kept to prevent breaking BC | |
91 | ##### |
|
91 | ##### | |
92 | # This record was released in 5.4 and usage was removed in 5.5
|
92 | # This record was released in 5.4 and usage was removed in 5.5 | 
93 | LEGACY_RECORD_RESOLVED_OTHER = b'R' |
|
93 | LEGACY_RECORD_RESOLVED_OTHER = b'R' | |
94 | # This record was released in 3.7 and usage was removed in 5.6
|
94 | # This record was released in 3.7 and usage was removed in 5.6 | 
95 | LEGACY_RECORD_DRIVER_RESOLVED = b'd' |
|
95 | LEGACY_RECORD_DRIVER_RESOLVED = b'd' | |
96 | # This record was released in 3.7 and usage was removed in 5.6
|
96 | # This record was released in 3.7 and usage was removed in 5.6 | 
97 | LEGACY_MERGE_DRIVER_STATE = b'm' |
|
97 | LEGACY_MERGE_DRIVER_STATE = b'm' | |
98 | # This record was released in 3.7 and usage was removed in 5.6
|
98 | # This record was released in 3.7 and usage was removed in 5.6 | 
99 | LEGACY_MERGE_DRIVER_MERGE = b'D' |
|
99 | LEGACY_MERGE_DRIVER_MERGE = b'D' | |
100 |
|
100 | |||
101 | CHANGE_ADDED = b'added' |
|
101 | CHANGE_ADDED = b'added' | |
102 | CHANGE_REMOVED = b'removed' |
|
102 | CHANGE_REMOVED = b'removed' | |
103 | CHANGE_MODIFIED = b'modified' |
|
103 | CHANGE_MODIFIED = b'modified' | |
104 |
|
104 | |||
105 |
|
105 | |||
106 | class MergeAction(object): |
|
106 | class MergeAction(object): | |
107 | """represent an "action" the merge needs to take for a given file
|
107 | """represent an "action" the merge needs to take for a given file | 
108 |
|
108 | |||
109 | Attributes: |
|
109 | Attributes: | |
110 |
|
110 | |||
111 | _short: internal representation used to identify each action |
|
111 | _short: internal representation used to identify each action | |
112 |
|
112 | |||
113 | no_op: True if the action does not affect the file content or tracking status
|
113 | no_op: True if the action does not affect the file content or tracking status | 
114 |
|
114 | |||
115 | narrow_safe: |
|
115 | narrow_safe: | |
116 | True if the action can be safely used for a file outside of the narrow |
|
116 | True if the action can be safely used for a file outside of the narrow | |
117 | set |
|
117 | set | |
118 |
|
118 | |||
119 | changes: |
|
119 | changes: | |
120 | The types of changes that this action involves. This is a work in
|
120 | The types of changes that this action involves. This is a work in | 
121 | progress and not all actions have one yet. In addition, some require
|
121 | progress and not all actions have one yet. In addition, some require | 
122 | user changes and cannot be fully decided. The values currently available
|
122 | user changes and cannot be fully decided. The values currently available | 
123 | are: |
|
123 | are: | |
124 |
|
124 | |||
125 | - ADDED: the file is new in both parents
|
125 | - ADDED: the file is new in both parents | 
126 | - REMOVED: the file existed in one parent and is getting removed
|
126 | - REMOVED: the file existed in one parent and is getting removed | 
127 | - MODIFIED: the file existed in at least one parent and is getting changed
|
127 | - MODIFIED: the file existed in at least one parent and is getting changed | 
128 | """ |
|
128 | """ | |
129 |
|
129 | |||
130 | ALL_ACTIONS = weakref.WeakSet() |
|
130 | ALL_ACTIONS = weakref.WeakSet() | |
131 | NO_OP_ACTIONS = weakref.WeakSet() |
|
131 | NO_OP_ACTIONS = weakref.WeakSet() | |
132 |
|
132 | |||
133 | def __init__(self, short, no_op=False, narrow_safe=False, changes=None): |
|
133 | def __init__(self, short, no_op=False, narrow_safe=False, changes=None): | |
134 | self._short = short |
|
134 | self._short = short | |
135 | self.ALL_ACTIONS.add(self) |
|
135 | self.ALL_ACTIONS.add(self) | |
136 | self.no_op = no_op |
|
136 | self.no_op = no_op | |
137 | if self.no_op: |
|
137 | if self.no_op: | |
138 | self.NO_OP_ACTIONS.add(self) |
|
138 | self.NO_OP_ACTIONS.add(self) | |
139 | self.narrow_safe = narrow_safe |
|
139 | self.narrow_safe = narrow_safe | |
140 | self.changes = changes |
|
140 | self.changes = changes | |
141 |
|
141 | |||
142 | def __hash__(self): |
|
142 | def __hash__(self): | |
143 | return hash(self._short) |
|
143 | return hash(self._short) | |
144 |
|
144 | |||
145 | def __repr__(self): |
|
145 | def __repr__(self): | |
146 | return 'MergeAction<%s>' % self._short.decode('ascii') |
|
146 | return 'MergeAction<%s>' % self._short.decode('ascii') | |
147 |
|
147 | |||
148 | def __bytes__(self): |
|
148 | def __bytes__(self): | |
149 | return self._short |
|
149 | return self._short | |
150 |
|
150 | |||
151 | def __eq__(self, other): |
|
151 | def __eq__(self, other): | |
152 | if other is None: |
|
152 | if other is None: | |
153 | return False |
|
153 | return False | |
154 | assert isinstance(other, MergeAction) |
|
154 | assert isinstance(other, MergeAction) | |
155 | return self._short == other._short |
|
155 | return self._short == other._short | |
156 |
|
156 | |||
157 | def __lt__(self, other): |
|
157 | def __lt__(self, other): | |
158 | return self._short < other._short |
|
158 | return self._short < other._short | |
159 |
|
159 | |||
160 |
|
160 | |||
161 | ACTION_FORGET = MergeAction(b'f', narrow_safe=True, changes=CHANGE_REMOVED) |
|
161 | ACTION_FORGET = MergeAction(b'f', narrow_safe=True, changes=CHANGE_REMOVED) | |
162 | ACTION_REMOVE = MergeAction(b'r', narrow_safe=True, changes=CHANGE_REMOVED) |
|
162 | ACTION_REMOVE = MergeAction(b'r', narrow_safe=True, changes=CHANGE_REMOVED) | |
163 | ACTION_ADD = MergeAction(b'a', narrow_safe=True, changes=CHANGE_ADDED) |
|
163 | ACTION_ADD = MergeAction(b'a', narrow_safe=True, changes=CHANGE_ADDED) | |
164 | ACTION_GET = MergeAction(b'g', narrow_safe=True, changes=CHANGE_MODIFIED) |
|
164 | ACTION_GET = MergeAction(b'g', narrow_safe=True, changes=CHANGE_MODIFIED) | |
165 | ACTION_PATH_CONFLICT = MergeAction(b'p') |
|
165 | ACTION_PATH_CONFLICT = MergeAction(b'p') | |
166 | ACTION_PATH_CONFLICT_RESOLVE = MergeAction(b'pr')
|
166 | ACTION_PATH_CONFLICT_RESOLVE = MergeAction(b'pr') | 
167 | ACTION_ADD_MODIFIED = MergeAction( |
|
167 | ACTION_ADD_MODIFIED = MergeAction( | |
168 | b'am', narrow_safe=True, changes=CHANGE_ADDED |
|
168 | b'am', narrow_safe=True, changes=CHANGE_ADDED | |
169 | ) # not 100% about the changes value here |
|
169 | ) # not 100% about the changes value here | |
170 | ACTION_CREATED = MergeAction(b'c', narrow_safe=True, changes=CHANGE_ADDED) |
|
170 | ACTION_CREATED = MergeAction(b'c', narrow_safe=True, changes=CHANGE_ADDED) | |
171 | ACTION_DELETED_CHANGED = MergeAction(b'dc') |
|
171 | ACTION_DELETED_CHANGED = MergeAction(b'dc') | |
172 | ACTION_CHANGED_DELETED = MergeAction(b'cd') |
|
172 | ACTION_CHANGED_DELETED = MergeAction(b'cd') | |
173 | ACTION_MERGE = MergeAction(b'm') |
|
173 | ACTION_MERGE = MergeAction(b'm') | |
174 | ACTION_LOCAL_DIR_RENAME_GET = MergeAction(b'dg') |
|
174 | ACTION_LOCAL_DIR_RENAME_GET = MergeAction(b'dg') | |
175 | ACTION_DIR_RENAME_MOVE_LOCAL = MergeAction(b'dm') |
|
175 | ACTION_DIR_RENAME_MOVE_LOCAL = MergeAction(b'dm') | |
176 | ACTION_KEEP = MergeAction(b'k', no_op=True) |
|
176 | ACTION_KEEP = MergeAction(b'k', no_op=True) | |
177 | # the file was absent on local side before merge and we should |
|
177 | # the file was absent on local side before merge and we should | |
178 | # keep it absent (absent means file not present, it can be a result |
|
178 | # keep it absent (absent means file not present, it can be a result | |
179 | # of file deletion, rename etc.) |
|
179 | # of file deletion, rename etc.) | |
180 | ACTION_KEEP_ABSENT = MergeAction(b'ka', no_op=True) |
|
180 | ACTION_KEEP_ABSENT = MergeAction(b'ka', no_op=True) | |
181 | # the file is absent on the ancestor and remote side of the merge |
|
181 | # the file is absent on the ancestor and remote side of the merge | |
182 | # hence this file is new and we should keep it |
|
182 | # hence this file is new and we should keep it | |
183 | ACTION_KEEP_NEW = MergeAction(b'kn', no_op=True) |
|
183 | ACTION_KEEP_NEW = MergeAction(b'kn', no_op=True) | |
184 | ACTION_EXEC = MergeAction(b'e', narrow_safe=True, changes=CHANGE_MODIFIED) |
|
184 | ACTION_EXEC = MergeAction(b'e', narrow_safe=True, changes=CHANGE_MODIFIED) | |
185 | ACTION_CREATED_MERGE = MergeAction( |
|
185 | ACTION_CREATED_MERGE = MergeAction( | |
186 | b'cm', narrow_safe=True, changes=CHANGE_ADDED |
|
186 | b'cm', narrow_safe=True, changes=CHANGE_ADDED | |
187 | ) |
|
187 | ) | |
188 |
|
188 | |||
189 |
|
189 | |||
190 | # Used by convert to detect situations it does not like, not sure what the exact
|
190 | # Used by convert to detect situations it does not like, not sure what the exact | 
191 | # criteria is |
|
191 | # criteria is | |
192 | CONVERT_MERGE_ACTIONS = ( |
|
192 | CONVERT_MERGE_ACTIONS = ( | |
193 | ACTION_MERGE, |
|
193 | ACTION_MERGE, | |
194 | ACTION_DIR_RENAME_MOVE_LOCAL, |
|
194 | ACTION_DIR_RENAME_MOVE_LOCAL, | |
195 | ACTION_CHANGED_DELETED, |
|
195 | ACTION_CHANGED_DELETED, | |
196 | ACTION_DELETED_CHANGED, |
|
196 | ACTION_DELETED_CHANGED, | |
197 | ) |
|
197 | ) | |
198 |
|
198 | |||
199 |
|
199 | |||
200 | class _mergestate_base(object): |
|
200 | class _mergestate_base(object): | |
201 | """track 3-way merge state of individual files |
|
201 | """track 3-way merge state of individual files | |
202 |
|
202 | |||
203 | The merge state is stored on disk when needed. Two files are used: one with |
|
203 | The merge state is stored on disk when needed. Two files are used: one with | |
204 | an old format (version 1), and one with a new format (version 2). Version 2 |
|
204 | an old format (version 1), and one with a new format (version 2). Version 2 | |
205 | stores a superset of the data in version 1, including new kinds of records |
|
205 | stores a superset of the data in version 1, including new kinds of records | |
206 | in the future. For more about the new format, see the documentation for |
|
206 | in the future. For more about the new format, see the documentation for | |
207 | `_readrecordsv2`. |
|
207 | `_readrecordsv2`. | |
208 |
|
208 | |||
209 | Each record can contain arbitrary content, and has an associated type. This |
|
209 | Each record can contain arbitrary content, and has an associated type. This | |
210 | `type` should be a letter. If `type` is uppercase, the record is mandatory: |
|
210 | `type` should be a letter. If `type` is uppercase, the record is mandatory: | |
211 | versions of Mercurial that don't support it should abort. If `type` is |
|
211 | versions of Mercurial that don't support it should abort. If `type` is | |
212 | lowercase, the record can be safely ignored. |
|
212 | lowercase, the record can be safely ignored. | |
213 |
|
213 | |||
214 | Currently known records: |
|
214 | Currently known records: | |
215 |
|
215 | |||
216 | L: the node of the "local" part of the merge (hexified version) |
|
216 | L: the node of the "local" part of the merge (hexified version) | |
217 | O: the node of the "other" part of the merge (hexified version) |
|
217 | O: the node of the "other" part of the merge (hexified version) | |
218 | F: a file to be merged entry |
|
218 | F: a file to be merged entry | |
219 | C: a change/delete or delete/change conflict |
|
219 | C: a change/delete or delete/change conflict | |
220 | P: a path conflict (file vs directory) |
|
220 | P: a path conflict (file vs directory) | |
221 | f: a (filename, dictionary) tuple of optional values for a given file |
|
221 | f: a (filename, dictionary) tuple of optional values for a given file | |
222 | l: the labels for the parts of the merge. |
|
222 | l: the labels for the parts of the merge. | |
223 |
|
223 | |||
224 | Merge record states (stored in self._state, indexed by filename): |
|
224 | Merge record states (stored in self._state, indexed by filename): | |
225 | u: unresolved conflict |
|
225 | u: unresolved conflict | |
226 | r: resolved conflict |
|
226 | r: resolved conflict | |
227 | pu: unresolved path conflict (file conflicts with directory) |
|
227 | pu: unresolved path conflict (file conflicts with directory) | |
228 | pr: resolved path conflict |
|
228 | pr: resolved path conflict | |
229 | o: file was merged in favor of other parent of merge (DEPRECATED) |
|
229 | o: file was merged in favor of other parent of merge (DEPRECATED) | |
230 |
|
230 | |||
231 | The resolve command transitions between 'u' and 'r' for conflicts and |
|
231 | The resolve command transitions between 'u' and 'r' for conflicts and | |
232 | 'pu' and 'pr' for path conflicts. |
|
232 | 'pu' and 'pr' for path conflicts. | |
233 | """ |
|
233 | """ | |
234 |
|
234 | |||
235 | def __init__(self, repo): |
|
235 | def __init__(self, repo): | |
236 | """Initialize the merge state. |
|
236 | """Initialize the merge state. | |
237 |
|
237 | |||
238 | Do not use this directly! Instead call read() or clean().""" |
|
238 | Do not use this directly! Instead call read() or clean().""" | |
239 | self._repo = repo |
|
239 | self._repo = repo | |
240 | self._state = {} |
|
240 | self._state = {} | |
241 | self._stateextras = collections.defaultdict(dict) |
|
241 | self._stateextras = collections.defaultdict(dict) | |
242 | self._local = None |
|
242 | self._local = None | |
243 | self._other = None |
|
243 | self._other = None | |
244 | self._labels = None |
|
244 | self._labels = None | |
245 | # contains a mapping of form: |
|
245 | # contains a mapping of form: | |
246 | # {filename : (merge_return_value, action_to_be_performed} |
|
246 | # {filename : (merge_return_value, action_to_be_performed} | |
247 | # these are results of re-running merge process |
|
247 | # these are results of re-running merge process | |
248 | # this dict is used to perform actions on dirstate caused by re-running |
|
248 | # this dict is used to perform actions on dirstate caused by re-running | |
249 | # the merge |
|
249 | # the merge | |
250 | self._results = {} |
|
250 | self._results = {} | |
251 | self._dirty = False |
|
251 | self._dirty = False | |
252 |
|
252 | |||
253 | def reset(self): |
|
253 | def reset(self): | |
254 | pass |
|
254 | pass | |
255 |
|
255 | |||
256 | def start(self, node, other, labels=None): |
|
256 | def start(self, node, other, labels=None): | |
257 | self._local = node |
|
257 | self._local = node | |
258 | self._other = other |
|
258 | self._other = other | |
259 | self._labels = labels |
|
259 | self._labels = labels | |
260 |
|
260 | |||
261 | @util.propertycache |
|
261 | @util.propertycache | |
262 | def local(self): |
|
262 | def local(self): | |
263 | if self._local is None: |
|
263 | if self._local is None: | |
264 | msg = b"local accessed but self._local isn't set" |
|
264 | msg = b"local accessed but self._local isn't set" | |
265 | raise error.ProgrammingError(msg) |
|
265 | raise error.ProgrammingError(msg) | |
266 | return self._local |
|
266 | return self._local | |
267 |
|
267 | |||
268 | @util.propertycache |
|
268 | @util.propertycache | |
269 | def localctx(self): |
|
269 | def localctx(self): | |
270 | return self._repo[self.local] |
|
270 | return self._repo[self.local] | |
271 |
|
271 | |||
272 | @util.propertycache |
|
272 | @util.propertycache | |
273 | def other(self): |
|
273 | def other(self): | |
274 | if self._other is None: |
|
274 | if self._other is None: | |
275 | msg = b"other accessed but self._other isn't set" |
|
275 | msg = b"other accessed but self._other isn't set" | |
276 | raise error.ProgrammingError(msg) |
|
276 | raise error.ProgrammingError(msg) | |
277 | return self._other |
|
277 | return self._other | |
278 |
|
278 | |||
279 | @util.propertycache |
|
279 | @util.propertycache | |
280 | def otherctx(self): |
|
280 | def otherctx(self): | |
281 | return self._repo[self.other] |
|
281 | return self._repo[self.other] | |
282 |
|
282 | |||
283 | def active(self): |
|
283 | def active(self): | |
284 | """Whether mergestate is active. |
|
284 | """Whether mergestate is active. | |
285 |
|
285 | |||
286 | Returns True if there appears to be mergestate. This is a rough proxy |
|
286 | Returns True if there appears to be mergestate. This is a rough proxy | |
287 | for "is a merge in progress." |
|
287 | for "is a merge in progress." | |
288 | """ |
|
288 | """ | |
289 | return bool(self._local) or bool(self._state) |
|
289 | return bool(self._local) or bool(self._state) | |
290 |
|
290 | |||
291 | def commit(self): |
|
291 | def commit(self): | |
292 | """Write current state on disk (if necessary)""" |
|
292 | """Write current state on disk (if necessary)""" | |
293 |
|
293 | |||
294 | @staticmethod |
|
294 | @staticmethod | |
295 | def getlocalkey(path): |
|
295 | def getlocalkey(path): | |
296 | """hash the path of a local file context for storage in the .hg/merge |
|
296 | """hash the path of a local file context for storage in the .hg/merge | |
297 | directory.""" |
|
297 | directory.""" | |
298 |
|
298 | |||
299 | return hex(hashutil.sha1(path).digest()) |
|
299 | return hex(hashutil.sha1(path).digest()) | |
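In other words, the backup of a conflicting file is stored under .hg/merge/&lt;sha1-of-its-path&gt;. A rough standard-library equivalent, assuming hashutil.sha1 behaves like hashlib.sha1 here (an assumption about the wrapper, not something this hunk shows):

    import hashlib

    def local_key(path):
        # path is bytes, e.g. b'dira/dirb/largefile'
        return hashlib.sha1(path).hexdigest()

    print(local_key(b'foo/bar.txt'))  # 40-character hex key used as the backup name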
300 |
|
300 | |||
301 | def _make_backup(self, fctx, localkey): |
|
301 | def _make_backup(self, fctx, localkey): | |
302 | raise NotImplementedError() |
|
302 | raise NotImplementedError() | |
303 |
|
303 | |||
304 | def _restore_backup(self, fctx, localkey, flags): |
|
304 | def _restore_backup(self, fctx, localkey, flags): | |
305 | raise NotImplementedError() |
|
305 | raise NotImplementedError() | |
306 |
|
306 | |||
307 | def add(self, fcl, fco, fca, fd): |
|
307 | def add(self, fcl, fco, fca, fd): | |
308 | """add a new (potentially?) conflicting file the merge state |
|
308 | """add a new (potentially?) conflicting file the merge state | |
309 | fcl: file context for local, |
|
309 | fcl: file context for local, | |
310 | fco: file context for remote, |
|
310 | fco: file context for remote, | |
311 | fca: file context for ancestors, |
|
311 | fca: file context for ancestors, | |
312 | fd: file path of the resulting merge. |
|
312 | fd: file path of the resulting merge. | |
313 |
|
313 | |||
314 | note: also write the local version to the `.hg/merge` directory. |
|
314 | note: also write the local version to the `.hg/merge` directory. | |
315 | """ |
|
315 | """ | |
316 | if fcl.isabsent(): |
|
316 | if fcl.isabsent(): | |
317 | localkey = self._repo.nodeconstants.nullhex |
|
317 | localkey = self._repo.nodeconstants.nullhex | |
318 | else: |
|
318 | else: | |
319 | localkey = mergestate.getlocalkey(fcl.path()) |
|
319 | localkey = mergestate.getlocalkey(fcl.path()) | |
320 | self._make_backup(fcl, localkey) |
|
320 | self._make_backup(fcl, localkey) | |
321 | self._state[fd] = [ |
|
321 | self._state[fd] = [ | |
322 | MERGE_RECORD_UNRESOLVED, |
|
322 | MERGE_RECORD_UNRESOLVED, | |
323 | localkey, |
|
323 | localkey, | |
324 | fcl.path(), |
|
324 | fcl.path(), | |
325 | fca.path(), |
|
325 | fca.path(), | |
326 | hex(fca.filenode()), |
|
326 | hex(fca.filenode()), | |
327 | fco.path(), |
|
327 | fco.path(), | |
328 | hex(fco.filenode()), |
|
328 | hex(fco.filenode()), | |
329 | fcl.flags(), |
|
329 | fcl.flags(), | |
330 | ] |
|
330 | ] | |
331 | self._stateextras[fd][b'ancestorlinknode'] = hex(fca.node()) |
|
331 | self._stateextras[fd][b'ancestorlinknode'] = hex(fca.node()) | |
332 | self._dirty = True |
|
332 | self._dirty = True | |
333 |
|
333 | |||
334 | def addpathconflict(self, path, frename, forigin): |
|
334 | def addpathconflict(self, path, frename, forigin): | |
335 | """add a new conflicting path to the merge state |
|
335 | """add a new conflicting path to the merge state | |
336 | path: the path that conflicts |
|
336 | path: the path that conflicts | |
337 | frename: the filename the conflicting file was renamed to |
|
337 | frename: the filename the conflicting file was renamed to | |
338 | forigin: origin of the file ('l' or 'r' for local/remote) |
|
338 | forigin: origin of the file ('l' or 'r' for local/remote) | |
339 | """ |
|
339 | """ | |
340 | self._state[path] = [MERGE_RECORD_UNRESOLVED_PATH, frename, forigin] |
|
340 | self._state[path] = [MERGE_RECORD_UNRESOLVED_PATH, frename, forigin] | |
341 | self._dirty = True |
|
341 | self._dirty = True | |
342 |
|
342 | |||
343 | def addcommitinfo(self, path, data): |
|
343 | def addcommitinfo(self, path, data): | |
344 | """stores information which is required at commit |
|
344 | """stores information which is required at commit | |
345 | into _stateextras""" |
|
345 | into _stateextras""" | |
346 | self._stateextras[path].update(data) |
|
346 | self._stateextras[path].update(data) | |
347 | self._dirty = True |
|
347 | self._dirty = True | |
348 |
|
348 | |||
349 | def __contains__(self, dfile): |
|
349 | def __contains__(self, dfile): | |
350 | return dfile in self._state |
|
350 | return dfile in self._state | |
351 |
|
351 | |||
352 | def __getitem__(self, dfile): |
|
352 | def __getitem__(self, dfile): | |
353 | return self._state[dfile][0] |
|
353 | return self._state[dfile][0] | |
354 |
|
354 | |||
355 | def __iter__(self): |
|
355 | def __iter__(self): | |
356 | return iter(sorted(self._state)) |
|
356 | return iter(sorted(self._state)) | |
357 |
|
357 | |||
358 | def files(self): |
|
358 | def files(self): | |
359 | return self._state.keys() |
|
359 | return self._state.keys() | |
360 |
|
360 | |||
361 | def mark(self, dfile, state): |
|
361 | def mark(self, dfile, state): | |
362 | self._state[dfile][0] = state |
|
362 | self._state[dfile][0] = state | |
363 | self._dirty = True |
|
363 | self._dirty = True | |
364 |
|
364 | |||
365 | def unresolved(self): |
|
365 | def unresolved(self): | |
366 | """Obtain the paths of unresolved files.""" |
|
366 | """Obtain the paths of unresolved files.""" | |
367 |
|
367 | |||
368 | for f, entry in pycompat.iteritems(self._state): |
|
368 | for f, entry in pycompat.iteritems(self._state): | |
369 | if entry[0] in ( |
|
369 | if entry[0] in ( | |
370 | MERGE_RECORD_UNRESOLVED, |
|
370 | MERGE_RECORD_UNRESOLVED, | |
371 | MERGE_RECORD_UNRESOLVED_PATH, |
|
371 | MERGE_RECORD_UNRESOLVED_PATH, | |
372 | ): |
|
372 | ): | |
373 | yield f |
|
373 | yield f | |
374 |
|
374 | |||
375 | def allextras(self): |
|
375 | def allextras(self): | |
376 | """return all extras information stored with the mergestate""" |
|
376 | """return all extras information stored with the mergestate""" | |
377 | return self._stateextras |
|
377 | return self._stateextras | |
378 |
|
378 | |||
379 | def extras(self, filename): |
|
379 | def extras(self, filename): | |
380 | """return extras stored with the mergestate for the given filename""" |
|
380 | """return extras stored with the mergestate for the given filename""" | |
381 | return self._stateextras[filename] |
|
381 | return self._stateextras[filename] | |
382 |
|
382 | |||
383 | def resolve(self, dfile, wctx): |
|
383 | def resolve(self, dfile, wctx): | |
384 | """run merge process for dfile |
|
384 | """run merge process for dfile | |
385 |
|
385 | |||
386 | Returns the exit code of the merge.""" |
|
386 | Returns the exit code of the merge.""" | |
387 | if self[dfile] in ( |
|
387 | if self[dfile] in ( | |
388 | MERGE_RECORD_RESOLVED, |
|
388 | MERGE_RECORD_RESOLVED, | |
389 | LEGACY_RECORD_DRIVER_RESOLVED, |
|
389 | LEGACY_RECORD_DRIVER_RESOLVED, | |
390 | ): |
|
390 | ): | |
391 | return 0 |
|
391 | return 0 | |
392 | stateentry = self._state[dfile] |
|
392 | stateentry = self._state[dfile] | |
393 | state, localkey, lfile, afile, anode, ofile, onode, flags = stateentry |
|
393 | state, localkey, lfile, afile, anode, ofile, onode, flags = stateentry | |
394 | octx = self._repo[self._other] |
|
394 | octx = self._repo[self._other] | |
395 | extras = self.extras(dfile) |
|
395 | extras = self.extras(dfile) | |
396 | anccommitnode = extras.get(b'ancestorlinknode') |
|
396 | anccommitnode = extras.get(b'ancestorlinknode') | |
397 | if anccommitnode: |
|
397 | if anccommitnode: | |
398 | actx = self._repo[anccommitnode] |
|
398 | actx = self._repo[anccommitnode] | |
399 | else: |
|
399 | else: | |
400 | actx = None |
|
400 | actx = None | |
401 | fcd = _filectxorabsent(localkey, wctx, dfile) |
|
401 | fcd = _filectxorabsent(localkey, wctx, dfile) | |
402 | fco = _filectxorabsent(onode, octx, ofile) |
|
402 | fco = _filectxorabsent(onode, octx, ofile) | |
403 | # TODO: move this to filectxorabsent |
|
403 | # TODO: move this to filectxorabsent | |
404 | fca = self._repo.filectx(afile, fileid=anode, changectx=actx) |
|
404 | fca = self._repo.filectx(afile, fileid=anode, changectx=actx) | |
405 | # "premerge" x flags |
|
405 | # "premerge" x flags | |
406 | flo = fco.flags() |
|
406 | flo = fco.flags() | |
407 | fla = fca.flags() |
|
407 | fla = fca.flags() | |
408 | if b'x' in flags + flo + fla and b'l' not in flags + flo + fla: |
|
408 | if b'x' in flags + flo + fla and b'l' not in flags + flo + fla: | |
409 | if fca.rev() == nullrev and flags != flo: |
|
409 | if fca.rev() == nullrev and flags != flo: | |
410 | self._repo.ui.warn( |
|
410 | self._repo.ui.warn( | |
411 | _( |
|
411 | _( | |
412 | b'warning: cannot merge flags for %s ' |
|
412 | b'warning: cannot merge flags for %s ' | |
413 | b'without common ancestor - keeping local flags\n' |
|
413 | b'without common ancestor - keeping local flags\n' | |
414 | ) |
|
414 | ) | |
415 | % afile |
|
415 | % afile | |
416 | ) |
|
416 | ) | |
417 | elif flags == fla: |
|
417 | elif flags == fla: | |
418 | flags = flo |
|
418 | flags = flo | |
419 | # restore local |
|
419 | # restore local | |
420 | if localkey != self._repo.nodeconstants.nullhex: |
|
420 | if localkey != self._repo.nodeconstants.nullhex: | |
421 | self._restore_backup(wctx[dfile], localkey, flags) |
|
421 | self._restore_backup(wctx[dfile], localkey, flags) | |
422 | else: |
|
422 | else: | |
423 | wctx[dfile].remove(ignoremissing=True) |
|
423 | wctx[dfile].remove(ignoremissing=True) | |
|
424 | ||||
|
425 | if not fco.cmp(fcd): # files identical? | |||
|
426 | # If the return value of merge is None, then there is no real conflict |||
|
427 | del self._state[dfile] | |||
|
428 | self._results[dfile] = None, None | |||
|
429 | self._dirty = True | |||
|
430 | return None | |||
|
431 | ||||
424 | merge_ret, deleted = filemerge.filemerge( |
|
432 | merge_ret, deleted = filemerge.filemerge( | |
425 | self._repo, |
|
433 | self._repo, | |
426 | wctx, |
|
434 | wctx, | |
427 | self._local, |
|
435 | self._local, | |
428 | lfile, |
|
436 | lfile, | |
429 | fcd, |
|
437 | fcd, | |
430 | fco, |
|
438 | fco, | |
431 | fca, |
|
439 | fca, | |
432 | labels=self._labels, |
|
440 | labels=self._labels, | |
433 | ) |
|
441 | ) | |
434 | if merge_ret is None: |
|
|||
435 | # If the return value of merge is None, then there is no real conflict |
|
|||
436 | del self._state[dfile] |
|
|||
437 | self._results[dfile] = None, None |
|
|||
438 | self._dirty = True |
|
|||
439 | return None |
|
|||
440 |
|
442 | |||
441 | if not merge_ret: |
|
443 | if not merge_ret: | |
442 | self.mark(dfile, MERGE_RECORD_RESOLVED) |
|
444 | self.mark(dfile, MERGE_RECORD_RESOLVED) | |
443 |
|
445 | |||
444 | action = None |
|
446 | action = None | |
445 | if deleted: |
|
447 | if deleted: | |
446 | if fcd.isabsent(): |
|
448 | if fcd.isabsent(): | |
447 | # dc: local picked. Need to drop if present, which may |
|
449 | # dc: local picked. Need to drop if present, which may | |
448 | # happen on re-resolves. |
|
450 | # happen on re-resolves. | |
449 | action = ACTION_FORGET |
|
451 | action = ACTION_FORGET | |
450 | else: |
|
452 | else: | |
451 | # cd: remote picked (or otherwise deleted) |
|
453 | # cd: remote picked (or otherwise deleted) | |
452 | action = ACTION_REMOVE |
|
454 | action = ACTION_REMOVE | |
453 | else: |
|
455 | else: | |
454 | if fcd.isabsent(): # dc: remote picked |
|
456 | if fcd.isabsent(): # dc: remote picked | |
455 | action = ACTION_GET |
|
457 | action = ACTION_GET | |
456 | elif fco.isabsent(): # cd: local picked |
|
458 | elif fco.isabsent(): # cd: local picked | |
457 | if dfile in self.localctx: |
|
459 | if dfile in self.localctx: | |
458 | action = ACTION_ADD_MODIFIED |
|
460 | action = ACTION_ADD_MODIFIED | |
459 | else: |
|
461 | else: | |
460 | action = ACTION_ADD |
|
462 | action = ACTION_ADD | |
461 | # else: regular merges (no action necessary) |
|
463 | # else: regular merges (no action necessary) | |
462 | self._results[dfile] = merge_ret, action |
|
464 | self._results[dfile] = merge_ret, action | |
463 |
|
465 | |||
464 | return merge_ret |
|
466 | return merge_ret | |
465 |
|
467 | |||
466 | def counts(self): |
|
468 | def counts(self): | |
467 | """return counts for updated, merged and removed files in this |
|
469 | """return counts for updated, merged and removed files in this | |
468 | session""" |
|
470 | session""" | |
469 | updated, merged, removed = 0, 0, 0 |
|
471 | updated, merged, removed = 0, 0, 0 | |
470 | for r, action in pycompat.itervalues(self._results): |
|
472 | for r, action in pycompat.itervalues(self._results): | |
471 | if r is None: |
|
473 | if r is None: | |
472 | updated += 1 |
|
474 | updated += 1 | |
473 | elif r == 0: |
|
475 | elif r == 0: | |
474 | if action == ACTION_REMOVE: |
|
476 | if action == ACTION_REMOVE: | |
475 | removed += 1 |
|
477 | removed += 1 | |
476 | else: |
|
478 | else: | |
477 | merged += 1 |
|
479 | merged += 1 | |
478 | return updated, merged, removed |
|
480 | return updated, merged, removed | |
479 |
|
481 | |||
480 | def unresolvedcount(self): |
|
482 | def unresolvedcount(self): | |
481 | """get unresolved count for this merge (persistent)""" |
|
483 | """get unresolved count for this merge (persistent)""" | |
482 | return len(list(self.unresolved())) |
|
484 | return len(list(self.unresolved())) | |
483 |
|
485 | |||
484 | def actions(self): |
|
486 | def actions(self): | |
485 | """return lists of actions to perform on the dirstate""" |
|
487 | """return lists of actions to perform on the dirstate""" | |
486 | actions = { |
|
488 | actions = { | |
487 | ACTION_REMOVE: [], |
|
489 | ACTION_REMOVE: [], | |
488 | ACTION_FORGET: [], |
|
490 | ACTION_FORGET: [], | |
489 | ACTION_ADD: [], |
|
491 | ACTION_ADD: [], | |
490 | ACTION_ADD_MODIFIED: [], |
|
492 | ACTION_ADD_MODIFIED: [], | |
491 | ACTION_GET: [], |
|
493 | ACTION_GET: [], | |
492 | } |
|
494 | } | |
493 | for f, (r, action) in pycompat.iteritems(self._results): |
|
495 | for f, (r, action) in pycompat.iteritems(self._results): | |
494 | if action is not None: |
|
496 | if action is not None: | |
495 | actions[action].append((f, None, b"merge result")) |
|
497 | actions[action].append((f, None, b"merge result")) | |
496 | return actions |
|
498 | return actions | |
497 |
|
499 | |||
498 |
|
500 | |||
499 | class mergestate(_mergestate_base): |
|
501 | class mergestate(_mergestate_base): | |
500 |
|
502 | |||
501 | statepathv1 = b'merge/state' |
|
503 | statepathv1 = b'merge/state' | |
502 | statepathv2 = b'merge/state2' |
|
504 | statepathv2 = b'merge/state2' | |
503 |
|
505 | |||
504 | @staticmethod |
|
506 | @staticmethod | |
505 | def clean(repo): |
|
507 | def clean(repo): | |
506 | """Initialize a brand new merge state, removing any existing state on |
|
508 | """Initialize a brand new merge state, removing any existing state on | |
507 | disk.""" |
|
509 | disk.""" | |
508 | ms = mergestate(repo) |
|
510 | ms = mergestate(repo) | |
509 | ms.reset() |
|
511 | ms.reset() | |
510 | return ms |
|
512 | return ms | |
511 |
|
513 | |||
512 | @staticmethod |
|
514 | @staticmethod | |
513 | def read(repo): |
|
515 | def read(repo): | |
514 | """Initialize the merge state, reading it from disk.""" |
|
516 | """Initialize the merge state, reading it from disk.""" | |
515 | ms = mergestate(repo) |
|
517 | ms = mergestate(repo) | |
516 | ms._read() |
|
518 | ms._read() | |
517 | return ms |
|
519 | return ms | |
518 |
|
520 | |||
519 | def _read(self): |
|
521 | def _read(self): | |
520 | """Analyse each record content to restore a serialized state from disk |
|
522 | """Analyse each record content to restore a serialized state from disk | |
521 |
|
523 | |||
522 | This function processes "record" entries produced by the de-serialization |
|
524 | This function processes "record" entries produced by the de-serialization | |
523 | of the on-disk file. |
|
525 | of the on-disk file. | |
524 | """ |
|
526 | """ | |
525 | unsupported = set() |
|
527 | unsupported = set() | |
526 | records = self._readrecords() |
|
528 | records = self._readrecords() | |
527 | for rtype, record in records: |
|
529 | for rtype, record in records: | |
528 | if rtype == RECORD_LOCAL: |
|
530 | if rtype == RECORD_LOCAL: | |
529 | self._local = bin(record) |
|
531 | self._local = bin(record) | |
530 | elif rtype == RECORD_OTHER: |
|
532 | elif rtype == RECORD_OTHER: | |
531 | self._other = bin(record) |
|
533 | self._other = bin(record) | |
532 | elif rtype == LEGACY_MERGE_DRIVER_STATE: |
|
534 | elif rtype == LEGACY_MERGE_DRIVER_STATE: | |
533 | pass |
|
535 | pass | |
534 | elif rtype in ( |
|
536 | elif rtype in ( | |
535 | RECORD_MERGED, |
|
537 | RECORD_MERGED, | |
536 | RECORD_CHANGEDELETE_CONFLICT, |
|
538 | RECORD_CHANGEDELETE_CONFLICT, | |
537 | RECORD_PATH_CONFLICT, |
|
539 | RECORD_PATH_CONFLICT, | |
538 | LEGACY_MERGE_DRIVER_MERGE, |
|
540 | LEGACY_MERGE_DRIVER_MERGE, | |
539 | LEGACY_RECORD_RESOLVED_OTHER, |
|
541 | LEGACY_RECORD_RESOLVED_OTHER, | |
540 | ): |
|
542 | ): | |
541 | bits = record.split(b'\0') |
|
543 | bits = record.split(b'\0') | |
542 | # merge entry type MERGE_RECORD_MERGED_OTHER is deprecated |
|
544 | # merge entry type MERGE_RECORD_MERGED_OTHER is deprecated | |
543 | # and we now store related information in _stateextras, so |
|
545 | # and we now store related information in _stateextras, so | |
544 | # let's write to _stateextras directly |
|
546 | # let's write to _stateextras directly | |
545 | if bits[1] == MERGE_RECORD_MERGED_OTHER: |
|
547 | if bits[1] == MERGE_RECORD_MERGED_OTHER: | |
546 | self._stateextras[bits[0]][b'filenode-source'] = b'other' |
|
548 | self._stateextras[bits[0]][b'filenode-source'] = b'other' | |
547 | else: |
|
549 | else: | |
548 | self._state[bits[0]] = bits[1:] |
|
550 | self._state[bits[0]] = bits[1:] | |
549 | elif rtype == RECORD_FILE_VALUES: |
|
551 | elif rtype == RECORD_FILE_VALUES: | |
550 | filename, rawextras = record.split(b'\0', 1) |
|
552 | filename, rawextras = record.split(b'\0', 1) | |
551 | extraparts = rawextras.split(b'\0') |
|
553 | extraparts = rawextras.split(b'\0') | |
552 | extras = {} |
|
554 | extras = {} | |
553 | i = 0 |
|
555 | i = 0 | |
554 | while i < len(extraparts): |
|
556 | while i < len(extraparts): | |
555 | extras[extraparts[i]] = extraparts[i + 1] |
|
557 | extras[extraparts[i]] = extraparts[i + 1] | |
556 | i += 2 |
|
558 | i += 2 | |
557 |
|
559 | |||
558 | self._stateextras[filename] = extras |
|
560 | self._stateextras[filename] = extras | |
559 | elif rtype == RECORD_LABELS: |
|
561 | elif rtype == RECORD_LABELS: | |
560 | labels = record.split(b'\0', 2) |
|
562 | labels = record.split(b'\0', 2) | |
561 | self._labels = [l for l in labels if len(l) > 0] |
|
563 | self._labels = [l for l in labels if len(l) > 0] | |
562 | elif not rtype.islower(): |
|
564 | elif not rtype.islower(): | |
563 | unsupported.add(rtype) |
|
565 | unsupported.add(rtype) | |
564 |
|
566 | |||
565 | if unsupported: |
|
567 | if unsupported: | |
566 | raise error.UnsupportedMergeRecords(unsupported) |
|
568 | raise error.UnsupportedMergeRecords(unsupported) | |
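The RECORD_FILE_VALUES branch above and _makerecords further down agree on a simple NUL-separated key/value layout for the per-file extras. A self-contained round-trip sketch of that layout (the helper names are made up for illustration and are not part of this module):

    def encode_extras(extras):
        # mirrors the b'%s\0%s' join used when the records are written out
        return b'\0'.join(b'%s\0%s' % (k, v) for k, v in extras.items())

    def decode_extras(rawextras):
        # mirrors the while-loop decoding in the RECORD_FILE_VALUES branch
        parts = rawextras.split(b'\0')
        return {parts[i]: parts[i + 1] for i in range(0, len(parts), 2)}

    extras = {b'ancestorlinknode': b'a' * 40}
    assert decode_extras(encode_extras(extras)) == extras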
567 |
|
569 | |||
568 | def _readrecords(self): |
|
570 | def _readrecords(self): | |
569 | """Read merge state from disk and return a list of record (TYPE, data) |
|
571 | """Read merge state from disk and return a list of record (TYPE, data) | |
570 |
|
572 | |||
571 | We read data from both v1 and v2 files and decide which one to use. |
|
573 | We read data from both v1 and v2 files and decide which one to use. | |
572 |
|
574 | |||
573 | V1 has been used by versions prior to 2.9.1 and contains less data than |
|
575 | V1 has been used by versions prior to 2.9.1 and contains less data than | |
574 | v2. We read both versions and check that no data in v2 contradicts |
|
576 | v2. We read both versions and check that no data in v2 contradicts | |
575 | v1. If there is no contradiction we can safely assume that both v1 |
|
577 | v1. If there is no contradiction we can safely assume that both v1 | |
576 | and v2 were written at the same time and use the extra data in v2. If |
|
578 | and v2 were written at the same time and use the extra data in v2. If | |
577 | there is a contradiction we ignore the v2 content, as we assume an old |
|
579 | there is a contradiction we ignore the v2 content, as we assume an old | |
578 | version of Mercurial overwrote the mergestate file and left an old v2 |
|
580 | version of Mercurial overwrote the mergestate file and left an old v2 | |
579 | file around. |
|
581 | file around. | |
580 |
|
582 | |||
581 | returns list of record [(TYPE, data), ...]""" |
|
583 | returns list of record [(TYPE, data), ...]""" | |
582 | v1records = self._readrecordsv1() |
|
584 | v1records = self._readrecordsv1() | |
583 | v2records = self._readrecordsv2() |
|
585 | v2records = self._readrecordsv2() | |
584 | if self._v1v2match(v1records, v2records): |
|
586 | if self._v1v2match(v1records, v2records): | |
585 | return v2records |
|
587 | return v2records | |
586 | else: |
|
588 | else: | |
587 | # v1 file is newer than v2 file, use it |
|
589 | # v1 file is newer than v2 file, use it | |
588 | # we have to infer the "other" changeset of the merge |
|
590 | # we have to infer the "other" changeset of the merge | |
589 | # we cannot do better than that with v1 of the format |
|
591 | # we cannot do better than that with v1 of the format | |
590 | mctx = self._repo[None].parents()[-1] |
|
592 | mctx = self._repo[None].parents()[-1] | |
591 | v1records.append((RECORD_OTHER, mctx.hex())) |
|
593 | v1records.append((RECORD_OTHER, mctx.hex())) | |
592 | # add placeholder "other" file node information |
|
594 | # add placeholder "other" file node information | |
593 | # nobody is using it yet so we do not need to fetch the data |
|
595 | # nobody is using it yet so we do not need to fetch the data | |
594 | # if mctx was wrong, `mctx[bits[-2]]` may fail. |
|
596 | # if mctx was wrong, `mctx[bits[-2]]` may fail. | |
595 | for idx, r in enumerate(v1records): |
|
597 | for idx, r in enumerate(v1records): | |
596 | if r[0] == RECORD_MERGED: |
|
598 | if r[0] == RECORD_MERGED: | |
597 | bits = r[1].split(b'\0') |
|
599 | bits = r[1].split(b'\0') | |
598 | bits.insert(-2, b'') |
|
600 | bits.insert(-2, b'') | |
599 | v1records[idx] = (r[0], b'\0'.join(bits)) |
|
601 | v1records[idx] = (r[0], b'\0'.join(bits)) | |
600 | return v1records |
|
602 | return v1records | |
601 |
|
603 | |||
602 | def _v1v2match(self, v1records, v2records): |
|
604 | def _v1v2match(self, v1records, v2records): | |
603 | oldv2 = set() # old format version of v2 record |
|
605 | oldv2 = set() # old format version of v2 record | |
604 | for rec in v2records: |
|
606 | for rec in v2records: | |
605 | if rec[0] == RECORD_LOCAL: |
|
607 | if rec[0] == RECORD_LOCAL: | |
606 | oldv2.add(rec) |
|
608 | oldv2.add(rec) | |
607 | elif rec[0] == RECORD_MERGED: |
|
609 | elif rec[0] == RECORD_MERGED: | |
608 | # drop the onode data (not contained in v1) |
|
610 | # drop the onode data (not contained in v1) | |
609 | oldv2.add((RECORD_MERGED, _droponode(rec[1]))) |
|
611 | oldv2.add((RECORD_MERGED, _droponode(rec[1]))) | |
610 | for rec in v1records: |
|
612 | for rec in v1records: | |
611 | if rec not in oldv2: |
|
613 | if rec not in oldv2: | |
612 | return False |
|
614 | return False | |
613 | else: |
|
615 | else: | |
614 | return True |
|
616 | return True | |
615 |
|
617 | |||
616 | def _readrecordsv1(self): |
|
618 | def _readrecordsv1(self): | |
617 | """read on disk merge state for version 1 file |
|
619 | """read on disk merge state for version 1 file | |
618 |
|
620 | |||
619 | returns list of record [(TYPE, data), ...] |
|
621 | returns list of record [(TYPE, data), ...] | |
620 |
|
622 | |||
621 | Note: the "F" data from this file are one entry short |
|
623 | Note: the "F" data from this file are one entry short | |
622 | (no "other file node" entry) |
|
624 | (no "other file node" entry) | |
623 | """ |
|
625 | """ | |
624 | records = [] |
|
626 | records = [] | |
625 | try: |
|
627 | try: | |
626 | f = self._repo.vfs(self.statepathv1) |
|
628 | f = self._repo.vfs(self.statepathv1) | |
627 | for i, l in enumerate(f): |
|
629 | for i, l in enumerate(f): | |
628 | if i == 0: |
|
630 | if i == 0: | |
629 | records.append((RECORD_LOCAL, l[:-1])) |
|
631 | records.append((RECORD_LOCAL, l[:-1])) | |
630 | else: |
|
632 | else: | |
631 | records.append((RECORD_MERGED, l[:-1])) |
|
633 | records.append((RECORD_MERGED, l[:-1])) | |
632 | f.close() |
|
634 | f.close() | |
633 | except IOError as err: |
|
635 | except IOError as err: | |
634 | if err.errno != errno.ENOENT: |
|
636 | if err.errno != errno.ENOENT: | |
635 | raise |
|
637 | raise | |
636 | return records |
|
638 | return records | |
637 |
|
639 | |||
638 | def _readrecordsv2(self): |
|
640 | def _readrecordsv2(self): | |
639 | """read on disk merge state for version 2 file |
|
641 | """read on disk merge state for version 2 file | |
640 |
|
642 | |||
641 | This format is a list of arbitrary records of the form: |
|
643 | This format is a list of arbitrary records of the form: | |
642 |
|
644 | |||
643 | [type][length][content] |
|
645 | [type][length][content] | |
644 |
|
646 | |||
645 | `type` is a single character, `length` is a 4 byte integer, and |
|
647 | `type` is a single character, `length` is a 4 byte integer, and | |
646 | `content` is an arbitrary byte sequence of length `length`. |
|
648 | `content` is an arbitrary byte sequence of length `length`. | |
647 |
|
649 | |||
648 | Mercurial versions prior to 3.7 have a bug where if there are |
|
650 | Mercurial versions prior to 3.7 have a bug where if there are | |
649 | unsupported mandatory merge records, attempting to clear out the merge |
|
651 | unsupported mandatory merge records, attempting to clear out the merge | |
650 | state with hg update --clean or similar aborts. The 't' record type |
|
652 | state with hg update --clean or similar aborts. The 't' record type | |
651 | works around that by writing out what those versions treat as an |
|
653 | works around that by writing out what those versions treat as an | |
652 | advisory record, but which later versions interpret as special: the first |
|
654 | advisory record, but which later versions interpret as special: the first | |
653 | character is the 'real' record type and everything onwards is the data. |
|
655 | character is the 'real' record type and everything onwards is the data. | |
654 |
|
656 | |||
655 | Returns list of records [(TYPE, data), ...].""" |
|
657 | Returns list of records [(TYPE, data), ...].""" | |
656 | records = [] |
|
658 | records = [] | |
657 | try: |
|
659 | try: | |
658 | f = self._repo.vfs(self.statepathv2) |
|
660 | f = self._repo.vfs(self.statepathv2) | |
659 | data = f.read() |
|
661 | data = f.read() | |
660 | off = 0 |
|
662 | off = 0 | |
661 | end = len(data) |
|
663 | end = len(data) | |
662 | while off < end: |
|
664 | while off < end: | |
663 | rtype = data[off : off + 1] |
|
665 | rtype = data[off : off + 1] | |
664 | off += 1 |
|
666 | off += 1 | |
665 | length = _unpack(b'>I', data[off : (off + 4)])[0] |
|
667 | length = _unpack(b'>I', data[off : (off + 4)])[0] | |
666 | off += 4 |
|
668 | off += 4 | |
667 | record = data[off : (off + length)] |
|
669 | record = data[off : (off + length)] | |
668 | off += length |
|
670 | off += length | |
669 | if rtype == RECORD_OVERRIDE: |
|
671 | if rtype == RECORD_OVERRIDE: | |
670 | rtype, record = record[0:1], record[1:] |
|
672 | rtype, record = record[0:1], record[1:] | |
671 | records.append((rtype, record)) |
|
673 | records.append((rtype, record)) | |
672 | f.close() |
|
674 | f.close() | |
673 | except IOError as err: |
|
675 | except IOError as err: | |
674 | if err.errno != errno.ENOENT: |
|
676 | if err.errno != errno.ENOENT: | |
675 | raise |
|
677 | raise | |
676 | return records |
|
678 | return records | |
677 |
|
679 | |||
678 | def commit(self): |
|
680 | def commit(self): | |
679 | if self._dirty: |
|
681 | if self._dirty: | |
680 | records = self._makerecords() |
|
682 | records = self._makerecords() | |
681 | self._writerecords(records) |
|
683 | self._writerecords(records) | |
682 | self._dirty = False |
|
684 | self._dirty = False | |
683 |
|
685 | |||
684 | def _makerecords(self): |
|
686 | def _makerecords(self): | |
685 | records = [] |
|
687 | records = [] | |
686 | records.append((RECORD_LOCAL, hex(self._local))) |
|
688 | records.append((RECORD_LOCAL, hex(self._local))) | |
687 | records.append((RECORD_OTHER, hex(self._other))) |
|
689 | records.append((RECORD_OTHER, hex(self._other))) | |
688 | # Write out state items. In all cases, the value of the state map entry |
|
690 | # Write out state items. In all cases, the value of the state map entry | |
689 | # is written as the contents of the record. The record type depends on |
|
691 | # is written as the contents of the record. The record type depends on | |
690 | # the type of state that is stored, and capital-letter records are used |
|
692 | # the type of state that is stored, and capital-letter records are used | |
691 | # to prevent older versions of Mercurial that do not support the feature |
|
693 | # to prevent older versions of Mercurial that do not support the feature | |
692 | # from loading them. |
|
694 | # from loading them. | |
693 | for filename, v in pycompat.iteritems(self._state): |
|
695 | for filename, v in pycompat.iteritems(self._state): | |
694 | if v[0] in ( |
|
696 | if v[0] in ( | |
695 | MERGE_RECORD_UNRESOLVED_PATH, |
|
697 | MERGE_RECORD_UNRESOLVED_PATH, | |
696 | MERGE_RECORD_RESOLVED_PATH, |
|
698 | MERGE_RECORD_RESOLVED_PATH, | |
697 | ): |
|
699 | ): | |
698 | # Path conflicts. These are stored in 'P' records. The current |
|
700 | # Path conflicts. These are stored in 'P' records. The current | |
699 | # resolution state ('pu' or 'pr') is stored within the record. |
|
701 | # resolution state ('pu' or 'pr') is stored within the record. | |
700 | records.append( |
|
702 | records.append( | |
701 | (RECORD_PATH_CONFLICT, b'\0'.join([filename] + v)) |
|
703 | (RECORD_PATH_CONFLICT, b'\0'.join([filename] + v)) | |
702 | ) |
|
704 | ) | |
703 | elif ( |
|
705 | elif ( | |
704 | v[1] == self._repo.nodeconstants.nullhex |
|
706 | v[1] == self._repo.nodeconstants.nullhex | |
705 | or v[6] == self._repo.nodeconstants.nullhex |
|
707 | or v[6] == self._repo.nodeconstants.nullhex | |
706 | ): |
|
708 | ): | |
707 | # Change/Delete or Delete/Change conflicts. These are stored in |
|
709 | # Change/Delete or Delete/Change conflicts. These are stored in | |
708 | # 'C' records. v[1] is the local file, and is nullhex when the |
|
710 | # 'C' records. v[1] is the local file, and is nullhex when the | |
709 | # file is deleted locally ('dc'). v[6] is the remote file, and |
|
711 | # file is deleted locally ('dc'). v[6] is the remote file, and | |
710 | # is nullhex when the file is deleted remotely ('cd'). |
|
712 | # is nullhex when the file is deleted remotely ('cd'). | |
711 | records.append( |
|
713 | records.append( | |
712 | (RECORD_CHANGEDELETE_CONFLICT, b'\0'.join([filename] + v)) |
|
714 | (RECORD_CHANGEDELETE_CONFLICT, b'\0'.join([filename] + v)) | |
713 | ) |
|
715 | ) | |
714 | else: |
|
716 | else: | |
715 | # Normal files. These are stored in 'F' records. |
|
717 | # Normal files. These are stored in 'F' records. | |
716 | records.append((RECORD_MERGED, b'\0'.join([filename] + v))) |
|
718 | records.append((RECORD_MERGED, b'\0'.join([filename] + v))) | |
717 | for filename, extras in sorted(pycompat.iteritems(self._stateextras)): |
|
719 | for filename, extras in sorted(pycompat.iteritems(self._stateextras)): | |
718 | rawextras = b'\0'.join( |
|
720 | rawextras = b'\0'.join( | |
719 | b'%s\0%s' % (k, v) for k, v in pycompat.iteritems(extras) |
|
721 | b'%s\0%s' % (k, v) for k, v in pycompat.iteritems(extras) | |
720 | ) |
|
722 | ) | |
721 | records.append( |
|
723 | records.append( | |
722 | (RECORD_FILE_VALUES, b'%s\0%s' % (filename, rawextras)) |
|
724 | (RECORD_FILE_VALUES, b'%s\0%s' % (filename, rawextras)) | |
723 | ) |
|
725 | ) | |
724 | if self._labels is not None: |
|
726 | if self._labels is not None: | |
725 | labels = b'\0'.join(self._labels) |
|
727 | labels = b'\0'.join(self._labels) | |
726 | records.append((RECORD_LABELS, labels)) |
|
728 | records.append((RECORD_LABELS, labels)) | |
727 | return records |
|
729 | return records | |
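The branches above classify each state entry into 'P', 'C' or 'F' records, with change/delete conflicts recognised by a nullhex on either the local (v[1]) or other (v[6]) file node. A standalone sketch of that classification; the concrete nullhex value and the 'pu'/'pr' codes are restated assumptions (a sha1 repository is assumed), not something this hunk defines:

    NULLHEX = b'0' * 40  # assumed: the all-zero hex node used for absent files

    def record_type(v):
        # v mirrors a _state value: [state, frename, forigin] for path
        # conflicts, or [state, localnode, lfile, afile, anode, ofile,
        # onode, flags] for everything else
        if v[0] in (b'pu', b'pr'):
            return b'P'   # path conflict record
        if v[1] == NULLHEX or v[6] == NULLHEX:
            return b'C'   # change/delete or delete/change conflict record
        return b'F'       # normal merged-file record

    assert record_type([b'pu', b'a.txt~abc123', b'l']) == b'P'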
728 |
|
730 | |||
729 | def _writerecords(self, records): |
|
731 | def _writerecords(self, records): | |
730 | """Write current state on disk (both v1 and v2)""" |
|
732 | """Write current state on disk (both v1 and v2)""" | |
731 | self._writerecordsv1(records) |
|
733 | self._writerecordsv1(records) | |
732 | self._writerecordsv2(records) |
|
734 | self._writerecordsv2(records) | |
733 |
|
735 | |||
734 | def _writerecordsv1(self, records): |
|
736 | def _writerecordsv1(self, records): | |
735 | """Write current state on disk in a version 1 file""" |
|
737 | """Write current state on disk in a version 1 file""" | |
736 | f = self._repo.vfs(self.statepathv1, b'wb') |
|
738 | f = self._repo.vfs(self.statepathv1, b'wb') | |
737 | irecords = iter(records) |
|
739 | irecords = iter(records) | |
738 | lrecords = next(irecords) |
|
740 | lrecords = next(irecords) | |
739 | assert lrecords[0] == RECORD_LOCAL |
|
741 | assert lrecords[0] == RECORD_LOCAL | |
740 | f.write(hex(self._local) + b'\n') |
|
742 | f.write(hex(self._local) + b'\n') | |
741 | for rtype, data in irecords: |
|
743 | for rtype, data in irecords: | |
742 | if rtype == RECORD_MERGED: |
|
744 | if rtype == RECORD_MERGED: | |
743 | f.write(b'%s\n' % _droponode(data)) |
|
745 | f.write(b'%s\n' % _droponode(data)) | |
744 | f.close() |
|
746 | f.close() | |
745 |
|
747 | |||
746 | def _writerecordsv2(self, records): |
|
748 | def _writerecordsv2(self, records): | |
747 | """Write current state on disk in a version 2 file |
|
749 | """Write current state on disk in a version 2 file | |
748 |
|
750 | |||
749 | See the docstring for _readrecordsv2 for why we use 't'.""" |
|
751 | See the docstring for _readrecordsv2 for why we use 't'.""" | |
750 | # these are the records that all version 2 clients can read |
|
752 | # these are the records that all version 2 clients can read | |
751 | allowlist = (RECORD_LOCAL, RECORD_OTHER, RECORD_MERGED) |
|
753 | allowlist = (RECORD_LOCAL, RECORD_OTHER, RECORD_MERGED) | |
752 | f = self._repo.vfs(self.statepathv2, b'wb') |
|
754 | f = self._repo.vfs(self.statepathv2, b'wb') | |
753 | for key, data in records: |
|
755 | for key, data in records: | |
754 | assert len(key) == 1 |
|
756 | assert len(key) == 1 | |
755 | if key not in allowlist: |
|
757 | if key not in allowlist: | |
756 | key, data = RECORD_OVERRIDE, b'%s%s' % (key, data) |
|
758 | key, data = RECORD_OVERRIDE, b'%s%s' % (key, data) | |
757 | format = b'>sI%is' % len(data) |
|
759 | format = b'>sI%is' % len(data) | |
758 | f.write(_pack(format, key, len(data), data)) |
|
760 | f.write(_pack(format, key, len(data), data)) | |
759 | f.close() |
|
761 | f.close() | |
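For completeness, the b'>sI%is' pack format above is the entire v2 record layout: one record-type byte, a big-endian 32-bit length, then the payload, which is exactly what _readrecordsv2 walks when reading. A standalone round-trip sketch using struct directly (pack_record/unpack_records are illustrative names only, and b'L'/b'O' are used here simply as the local/other record letters):

    import struct

    def pack_record(rtype, data):
        # rtype is a single byte such as b'L' or b'O'; data is arbitrary bytes
        return struct.pack(b'>sI%ds' % len(data), rtype, len(data), data)

    def unpack_records(blob):
        off, records = 0, []
        while off < len(blob):
            rtype = blob[off:off + 1]
            (length,) = struct.unpack(b'>I', blob[off + 1:off + 5])
            records.append((rtype, blob[off + 5:off + 5 + length]))
            off += 5 + length
        return records

    blob = pack_record(b'L', b'0' * 40) + pack_record(b'O', b'f' * 40)
    assert unpack_records(blob) == [(b'L', b'0' * 40), (b'O', b'f' * 40)]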
760 |
|
762 | |||
761 | def _make_backup(self, fctx, localkey): |
|
763 | def _make_backup(self, fctx, localkey): | |
762 | self._repo.vfs.write(b'merge/' + localkey, fctx.data()) |
|
764 | self._repo.vfs.write(b'merge/' + localkey, fctx.data()) | |
763 |
|
765 | |||
764 | def _restore_backup(self, fctx, localkey, flags): |
|
766 | def _restore_backup(self, fctx, localkey, flags): | |
765 | with self._repo.vfs(b'merge/' + localkey) as f: |
|
767 | with self._repo.vfs(b'merge/' + localkey) as f: | |
766 | fctx.write(f.read(), flags) |
|
768 | fctx.write(f.read(), flags) | |
767 |
|
769 | |||
768 | def reset(self): |
|
770 | def reset(self): | |
769 | shutil.rmtree(self._repo.vfs.join(b'merge'), True) |
|
771 | shutil.rmtree(self._repo.vfs.join(b'merge'), True) | |
770 |
|
772 | |||
771 |
|
773 | |||
772 | class memmergestate(_mergestate_base): |
|
774 | class memmergestate(_mergestate_base): | |
773 | def __init__(self, repo): |
|
775 | def __init__(self, repo): | |
774 | super(memmergestate, self).__init__(repo) |
|
776 | super(memmergestate, self).__init__(repo) | |
775 | self._backups = {} |
|
777 | self._backups = {} | |
776 |
|
778 | |||
777 | def _make_backup(self, fctx, localkey): |
|
779 | def _make_backup(self, fctx, localkey): | |
778 | self._backups[localkey] = fctx.data() |
|
780 | self._backups[localkey] = fctx.data() | |
779 |
|
781 | |||
780 | def _restore_backup(self, fctx, localkey, flags): |
|
782 | def _restore_backup(self, fctx, localkey, flags): | |
781 | fctx.write(self._backups[localkey], flags) |
|
783 | fctx.write(self._backups[localkey], flags) | |
782 |
|
784 | |||
783 |
|
785 | |||
784 | def recordupdates(repo, actions, branchmerge, getfiledata): |
|
786 | def recordupdates(repo, actions, branchmerge, getfiledata): | |
785 | """record merge actions to the dirstate""" |
|
787 | """record merge actions to the dirstate""" | |
786 | # remove (must come first) |
|
788 | # remove (must come first) | |
787 | for f, args, msg in actions.get(ACTION_REMOVE, []): |
|
789 | for f, args, msg in actions.get(ACTION_REMOVE, []): | |
788 | if branchmerge: |
|
790 | if branchmerge: | |
789 | repo.dirstate.update_file(f, p1_tracked=True, wc_tracked=False) |
|
791 | repo.dirstate.update_file(f, p1_tracked=True, wc_tracked=False) | |
790 | else: |
|
792 | else: | |
791 | repo.dirstate.update_file(f, p1_tracked=False, wc_tracked=False) |
|
793 | repo.dirstate.update_file(f, p1_tracked=False, wc_tracked=False) | |
792 |
|
794 | |||
793 | # forget (must come first) |
|
795 | # forget (must come first) | |
794 | for f, args, msg in actions.get(ACTION_FORGET, []): |
|
796 | for f, args, msg in actions.get(ACTION_FORGET, []): | |
795 | repo.dirstate.update_file(f, p1_tracked=False, wc_tracked=False) |
|
797 | repo.dirstate.update_file(f, p1_tracked=False, wc_tracked=False) | |
796 |
|
798 | |||
797 | # resolve path conflicts |
|
799 | # resolve path conflicts | |
798 | for f, args, msg in actions.get(ACTION_PATH_CONFLICT_RESOLVE, []): |
|
800 | for f, args, msg in actions.get(ACTION_PATH_CONFLICT_RESOLVE, []): | |
799 | (f0, origf0) = args |
|
801 | (f0, origf0) = args | |
800 | repo.dirstate.update_file(f, p1_tracked=False, wc_tracked=True) |
|
802 | repo.dirstate.update_file(f, p1_tracked=False, wc_tracked=True) | |
801 | repo.dirstate.copy(origf0, f) |
|
803 | repo.dirstate.copy(origf0, f) | |
802 | if f0 == origf0: |
|
804 | if f0 == origf0: | |
803 | repo.dirstate.update_file(f0, p1_tracked=True, wc_tracked=False) |
|
805 | repo.dirstate.update_file(f0, p1_tracked=True, wc_tracked=False) | |
804 | else: |
|
806 | else: | |
805 | repo.dirstate.update_file(f0, p1_tracked=False, wc_tracked=False) |
|
807 | repo.dirstate.update_file(f0, p1_tracked=False, wc_tracked=False) | |
806 |
|
808 | |||
807 | # re-add |
|
809 | # re-add | |
808 | for f, args, msg in actions.get(ACTION_ADD, []): |
|
810 | for f, args, msg in actions.get(ACTION_ADD, []): | |
809 | repo.dirstate.update_file(f, p1_tracked=False, wc_tracked=True) |
|
811 | repo.dirstate.update_file(f, p1_tracked=False, wc_tracked=True) | |
810 |
|
812 | |||
811 | # re-add/mark as modified |
|
813 | # re-add/mark as modified | |
812 | for f, args, msg in actions.get(ACTION_ADD_MODIFIED, []): |
|
814 | for f, args, msg in actions.get(ACTION_ADD_MODIFIED, []): | |
813 | if branchmerge: |
|
815 | if branchmerge: | |
814 | repo.dirstate.update_file( |
|
816 | repo.dirstate.update_file( | |
815 | f, p1_tracked=True, wc_tracked=True, possibly_dirty=True |
|
817 | f, p1_tracked=True, wc_tracked=True, possibly_dirty=True | |
816 | ) |
|
818 | ) | |
817 | else: |
|
819 | else: | |
818 | repo.dirstate.update_file(f, p1_tracked=False, wc_tracked=True) |
|
820 | repo.dirstate.update_file(f, p1_tracked=False, wc_tracked=True) | |
819 |
|
821 | |||
820 | # exec change |
|
822 | # exec change | |
821 | for f, args, msg in actions.get(ACTION_EXEC, []): |
|
823 | for f, args, msg in actions.get(ACTION_EXEC, []): | |
822 | repo.dirstate.update_file( |
|
824 | repo.dirstate.update_file( | |
823 | f, p1_tracked=True, wc_tracked=True, possibly_dirty=True |
|
825 | f, p1_tracked=True, wc_tracked=True, possibly_dirty=True | |
824 | ) |
|
826 | ) | |
825 |
|
827 | |||
826 | # keep |
|
828 | # keep | |
827 | for f, args, msg in actions.get(ACTION_KEEP, []): |
|
829 | for f, args, msg in actions.get(ACTION_KEEP, []): | |
828 | pass |
|
830 | pass | |
829 |
|
831 | |||
830 | # keep deleted |
|
832 | # keep deleted | |
831 | for f, args, msg in actions.get(ACTION_KEEP_ABSENT, []): |
|
833 | for f, args, msg in actions.get(ACTION_KEEP_ABSENT, []): | |
832 | pass |
|
834 | pass | |
833 |
|
835 | |||
834 | # keep new |
|
836 | # keep new | |
835 | for f, args, msg in actions.get(ACTION_KEEP_NEW, []): |
|
837 | for f, args, msg in actions.get(ACTION_KEEP_NEW, []): | |
836 | pass |
|
838 | pass | |
837 |
|
839 | |||
838 | # get |
|
840 | # get | |
839 | for f, args, msg in actions.get(ACTION_GET, []): |
|
841 | for f, args, msg in actions.get(ACTION_GET, []): | |
840 | if branchmerge: |
|
842 | if branchmerge: | |
841 | # tracked in p1 can also be True, but update_file should not care |
|
843 | # tracked in p1 can also be True, but update_file should not care | |
842 | old_entry = repo.dirstate.get_entry(f) |
|
844 | old_entry = repo.dirstate.get_entry(f) | |
843 | p1_tracked = old_entry.any_tracked and not old_entry.added |
|
845 | p1_tracked = old_entry.any_tracked and not old_entry.added | |
844 | repo.dirstate.update_file( |
|
846 | repo.dirstate.update_file( | |
845 | f, |
|
847 | f, | |
846 | p1_tracked=p1_tracked, |
|
848 | p1_tracked=p1_tracked, | |
847 | wc_tracked=True, |
|
849 | wc_tracked=True, | |
848 | p2_info=True, |
|
850 | p2_info=True, | |
849 | ) |
|
851 | ) | |
850 | else: |
|
852 | else: | |
851 | parentfiledata = getfiledata[f] if getfiledata else None |
|
853 | parentfiledata = getfiledata[f] if getfiledata else None | |
852 | repo.dirstate.update_file( |
|
854 | repo.dirstate.update_file( | |
853 | f, |
|
855 | f, | |
854 | p1_tracked=True, |
|
856 | p1_tracked=True, | |
855 | wc_tracked=True, |
|
857 | wc_tracked=True, | |
856 | parentfiledata=parentfiledata, |
|
858 | parentfiledata=parentfiledata, | |
857 | ) |
|
859 | ) | |
858 |
|
860 | |||
859 | # merge |
|
861 | # merge | |
860 | for f, args, msg in actions.get(ACTION_MERGE, []): |
|
862 | for f, args, msg in actions.get(ACTION_MERGE, []): | |
861 | f1, f2, fa, move, anc = args |
|
863 | f1, f2, fa, move, anc = args | |
862 | if branchmerge: |
|
864 | if branchmerge: | |
863 | # We've done a branch merge, mark this file as merged |
|
865 | # We've done a branch merge, mark this file as merged | |
864 | # so that we properly record the merge later |
|
866 | # so that we properly record the merge later | |
865 | p1_tracked = f1 == f |
|
867 | p1_tracked = f1 == f | |
866 | repo.dirstate.update_file( |
|
868 | repo.dirstate.update_file( | |
867 | f, |
|
869 | f, | |
868 | p1_tracked=p1_tracked, |
|
870 | p1_tracked=p1_tracked, | |
869 | wc_tracked=True, |
|
871 | wc_tracked=True, | |
870 | p2_info=True, |
|
872 | p2_info=True, | |
871 | ) |
|
873 | ) | |
872 | if f1 != f2: # copy/rename |
|
874 | if f1 != f2: # copy/rename | |
873 | if move: |
|
875 | if move: | |
874 | repo.dirstate.update_file( |
|
876 | repo.dirstate.update_file( | |
875 | f1, p1_tracked=True, wc_tracked=False |
|
877 | f1, p1_tracked=True, wc_tracked=False | |
876 | ) |
|
878 | ) | |
877 | if f1 != f: |
|
879 | if f1 != f: | |
878 | repo.dirstate.copy(f1, f) |
|
880 | repo.dirstate.copy(f1, f) | |
879 | else: |
|
881 | else: | |
880 | repo.dirstate.copy(f2, f) |
|
882 | repo.dirstate.copy(f2, f) | |
881 | else: |
|
883 | else: | |
882 | # We've update-merged a locally modified file, so |
|
884 | # We've update-merged a locally modified file, so | |
883 | # we set the dirstate to emulate a normal checkout |
|
885 | # we set the dirstate to emulate a normal checkout | |
884 | # of that file some time in the past. Thus our |
|
886 | # of that file some time in the past. Thus our | |
885 | # merge will appear as a normal local file |
|
887 | # merge will appear as a normal local file | |
886 | # modification. |
|
888 | # modification. | |
887 | if f2 == f: # file not locally copied/moved |
|
889 | if f2 == f: # file not locally copied/moved | |
888 | repo.dirstate.update_file( |
|
890 | repo.dirstate.update_file( | |
889 | f, p1_tracked=True, wc_tracked=True, possibly_dirty=True |
|
891 | f, p1_tracked=True, wc_tracked=True, possibly_dirty=True | |
890 | ) |
|
892 | ) | |
891 | if move: |
|
893 | if move: | |
892 | repo.dirstate.update_file( |
|
894 | repo.dirstate.update_file( | |
893 | f1, p1_tracked=False, wc_tracked=False |
|
895 | f1, p1_tracked=False, wc_tracked=False | |
894 | ) |
|
896 | ) | |
895 |
|
897 | |||
896 | # directory rename, move local |
|
898 | # directory rename, move local | |
897 | for f, args, msg in actions.get(ACTION_DIR_RENAME_MOVE_LOCAL, []): |
|
899 | for f, args, msg in actions.get(ACTION_DIR_RENAME_MOVE_LOCAL, []): | |
898 | f0, flag = args |
|
900 | f0, flag = args | |
899 | if branchmerge: |
|
901 | if branchmerge: | |
900 | repo.dirstate.update_file(f, p1_tracked=False, wc_tracked=True) |
|
902 | repo.dirstate.update_file(f, p1_tracked=False, wc_tracked=True) | |
901 | repo.dirstate.update_file(f0, p1_tracked=True, wc_tracked=False) |
|
903 | repo.dirstate.update_file(f0, p1_tracked=True, wc_tracked=False) | |
902 | repo.dirstate.copy(f0, f) |
|
904 | repo.dirstate.copy(f0, f) | |
903 | else: |
|
905 | else: | |
904 | repo.dirstate.update_file(f, p1_tracked=True, wc_tracked=True) |
|
906 | repo.dirstate.update_file(f, p1_tracked=True, wc_tracked=True) | |
905 | repo.dirstate.update_file(f0, p1_tracked=False, wc_tracked=False) |
|
907 | repo.dirstate.update_file(f0, p1_tracked=False, wc_tracked=False) | |
906 |
|
908 | |||
907 | # directory rename, get |
|
909 | # directory rename, get | |
908 | for f, args, msg in actions.get(ACTION_LOCAL_DIR_RENAME_GET, []): |
|
910 | for f, args, msg in actions.get(ACTION_LOCAL_DIR_RENAME_GET, []): | |
909 | f0, flag = args |
|
911 | f0, flag = args | |
910 | if branchmerge: |
|
912 | if branchmerge: | |
911 | repo.dirstate.update_file(f, p1_tracked=False, wc_tracked=True) |
|
913 | repo.dirstate.update_file(f, p1_tracked=False, wc_tracked=True) | |
912 | repo.dirstate.copy(f0, f) |
|
914 | repo.dirstate.copy(f0, f) | |
913 | else: |
|
915 | else: | |
914 | repo.dirstate.update_file(f, p1_tracked=True, wc_tracked=True) |
|
916 | repo.dirstate.update_file(f, p1_tracked=True, wc_tracked=True) |
@@ -1,1316 +1,1316 b'' | |||||
1 | This file contains testcases that tend to be related to special cases or less |
|
1 | This file contains testcases that tend to be related to special cases or less | |
2 | common commands affecting largefiles. |
|
2 | common commands affecting largefiles. | |
3 |
|
3 | |||
4 | $ hg init requirements |
|
4 | $ hg init requirements | |
5 | $ cd requirements |
|
5 | $ cd requirements | |
6 |
|
6 | |||
7 | # largefiles not loaded by default. |
|
7 | # largefiles not loaded by default. | |
8 |
|
8 | |||
9 | $ hg config extensions |
|
9 | $ hg config extensions | |
10 | [1] |
|
10 | [1] | |
11 |
|
11 | |||
12 | # Adding largefiles to the requires file will auto-load the largefiles extension. |
|
12 | # Adding largefiles to the requires file will auto-load the largefiles extension. | |
13 |
|
13 | |||
14 | $ echo largefiles >> .hg/requires |
|
14 | $ echo largefiles >> .hg/requires | |
15 | $ hg config extensions |
|
15 | $ hg config extensions | |
16 | extensions.largefiles= |
|
16 | extensions.largefiles= | |
17 |
|
17 | |||
18 | # But only if there is no config entry for the extension already. |
|
18 | # But only if there is no config entry for the extension already. | |
19 |
|
19 | |||
20 | $ cat > .hg/hgrc << EOF |
|
20 | $ cat > .hg/hgrc << EOF | |
21 | > [extensions] |
|
21 | > [extensions] | |
22 | > largefiles=! |
|
22 | > largefiles=! | |
23 | > EOF |
|
23 | > EOF | |
24 |
|
24 | |||
25 | $ hg config extensions |
|
25 | $ hg config extensions | |
26 | abort: repository requires features unknown to this Mercurial: largefiles |
|
26 | abort: repository requires features unknown to this Mercurial: largefiles | |
27 | (see https://mercurial-scm.org/wiki/MissingRequirement for more information) |
|
27 | (see https://mercurial-scm.org/wiki/MissingRequirement for more information) | |
28 | [255] |
|
28 | [255] | |
29 |
|
29 | |||
30 | $ cat > .hg/hgrc << EOF |
|
30 | $ cat > .hg/hgrc << EOF | |
31 | > [extensions] |
|
31 | > [extensions] | |
32 | > largefiles= |
|
32 | > largefiles= | |
33 | > EOF |
|
33 | > EOF | |
34 |
|
34 | |||
35 | $ hg config extensions |
|
35 | $ hg config extensions | |
36 | extensions.largefiles= |
|
36 | extensions.largefiles= | |
37 |
|
37 | |||
38 | $ cat > .hg/hgrc << EOF |
|
38 | $ cat > .hg/hgrc << EOF | |
39 | > [extensions] |
|
39 | > [extensions] | |
40 | > largefiles = missing.py |
|
40 | > largefiles = missing.py | |
41 | > EOF |
|
41 | > EOF | |
42 |
|
42 | |||
43 | $ hg config extensions |
|
43 | $ hg config extensions | |
44 | \*\*\* failed to import extension "largefiles" from missing.py: [Errno *] $ENOENT$: 'missing.py' (glob) |
|
44 | \*\*\* failed to import extension "largefiles" from missing.py: [Errno *] $ENOENT$: 'missing.py' (glob) | |
45 | abort: repository requires features unknown to this Mercurial: largefiles |
|
45 | abort: repository requires features unknown to this Mercurial: largefiles | |
46 | (see https://mercurial-scm.org/wiki/MissingRequirement for more information) |
|
46 | (see https://mercurial-scm.org/wiki/MissingRequirement for more information) | |
47 | [255] |
|
47 | [255] | |
48 |
|
48 | |||
49 | $ cd .. |
|
49 | $ cd .. | |
50 |
|
50 | |||
51 | Each section should be independent of the others. |
|
51 | Each section should be independent of the others. | |
52 |
|
52 | |||
53 | $ USERCACHE="$TESTTMP/cache"; export USERCACHE |
|
53 | $ USERCACHE="$TESTTMP/cache"; export USERCACHE | |
54 | $ mkdir "${USERCACHE}" |
|
54 | $ mkdir "${USERCACHE}" | |
55 | $ cat >> $HGRCPATH <<EOF |
|
55 | $ cat >> $HGRCPATH <<EOF | |
56 | > [extensions] |
|
56 | > [extensions] | |
57 | > largefiles= |
|
57 | > largefiles= | |
58 | > purge= |
|
58 | > purge= | |
59 | > rebase= |
|
59 | > rebase= | |
60 | > transplant= |
|
60 | > transplant= | |
61 | > [phases] |
|
61 | > [phases] | |
62 | > publish=False |
|
62 | > publish=False | |
63 | > [largefiles] |
|
63 | > [largefiles] | |
64 | > minsize=2 |
|
64 | > minsize=2 | |
65 | > patterns=glob:**.dat |
|
65 | > patterns=glob:**.dat | |
66 | > usercache=${USERCACHE} |
|
66 | > usercache=${USERCACHE} | |
67 | > [hooks] |
|
67 | > [hooks] | |
68 | > precommit=sh -c "echo \\"Invoking status precommit hook\\"; hg status" |
|
68 | > precommit=sh -c "echo \\"Invoking status precommit hook\\"; hg status" | |
69 | > EOF |
|
69 | > EOF | |
70 |
|
70 | |||
71 |
|
71 | |||
72 |
|
72 | |||
73 | Test copies and moves from a directory other than root (issue3516) |
|
73 | Test copies and moves from a directory other than root (issue3516) | |
74 | ========================================================================= |
|
74 | ========================================================================= | |
75 |
|
75 | |||
76 | $ hg init lf_cpmv |
|
76 | $ hg init lf_cpmv | |
77 | $ cd lf_cpmv |
|
77 | $ cd lf_cpmv | |
78 | $ mkdir dira |
|
78 | $ mkdir dira | |
79 | $ mkdir dira/dirb |
|
79 | $ mkdir dira/dirb | |
80 | $ touch dira/dirb/largefile |
|
80 | $ touch dira/dirb/largefile | |
81 | $ hg add --large dira/dirb/largefile |
|
81 | $ hg add --large dira/dirb/largefile | |
82 | $ hg commit -m "added" |
|
82 | $ hg commit -m "added" | |
83 | Invoking status precommit hook |
|
83 | Invoking status precommit hook | |
84 | A dira/dirb/largefile |
|
84 | A dira/dirb/largefile | |
85 | $ cd dira |
|
85 | $ cd dira | |
86 | $ hg cp dirb/largefile foo/largefile |
|
86 | $ hg cp dirb/largefile foo/largefile | |
87 |
|
87 | |||
88 | TODO: Ideally, this should mention the largefile, not the standin |
|
88 | TODO: Ideally, this should mention the largefile, not the standin | |
89 | $ hg log -T '{rev}\n' --stat 'set:clean()' |
|
89 | $ hg log -T '{rev}\n' --stat 'set:clean()' | |
90 | 0 |
|
90 | 0 | |
91 | .hglf/dira/dirb/largefile | 1 + |
|
91 | .hglf/dira/dirb/largefile | 1 + | |
92 | 1 files changed, 1 insertions(+), 0 deletions(-) |
|
92 | 1 files changed, 1 insertions(+), 0 deletions(-) | |
93 |
|
93 | |||
94 | $ hg ci -m "deep copy" |
|
94 | $ hg ci -m "deep copy" | |
95 | Invoking status precommit hook |
|
95 | Invoking status precommit hook | |
96 | A dira/foo/largefile |
|
96 | A dira/foo/largefile | |
97 | $ find . | sort |
|
97 | $ find . | sort | |
98 | . |
|
98 | . | |
99 | ./dirb |
|
99 | ./dirb | |
100 | ./dirb/largefile |
|
100 | ./dirb/largefile | |
101 | ./foo |
|
101 | ./foo | |
102 | ./foo/largefile |
|
102 | ./foo/largefile | |
103 | $ hg mv foo/largefile baz/largefile |
|
103 | $ hg mv foo/largefile baz/largefile | |
104 | $ hg ci -m "moved" |
|
104 | $ hg ci -m "moved" | |
105 | Invoking status precommit hook |
|
105 | Invoking status precommit hook | |
106 | A dira/baz/largefile |
|
106 | A dira/baz/largefile | |
107 | R dira/foo/largefile |
|
107 | R dira/foo/largefile | |
108 | $ find . | sort |
|
108 | $ find . | sort | |
109 | . |
|
109 | . | |
110 | ./baz |
|
110 | ./baz | |
111 | ./baz/largefile |
|
111 | ./baz/largefile | |
112 | ./dirb |
|
112 | ./dirb | |
113 | ./dirb/largefile |
|
113 | ./dirb/largefile | |
114 | $ cd .. |
|
114 | $ cd .. | |
115 | $ hg mv dira dirc |
|
115 | $ hg mv dira dirc | |
116 | moving .hglf/dira/baz/largefile to .hglf/dirc/baz/largefile |
|
116 | moving .hglf/dira/baz/largefile to .hglf/dirc/baz/largefile | |
117 | moving .hglf/dira/dirb/largefile to .hglf/dirc/dirb/largefile |
|
117 | moving .hglf/dira/dirb/largefile to .hglf/dirc/dirb/largefile | |
118 | $ find * | sort |
|
118 | $ find * | sort | |
119 | dirc |
|
119 | dirc | |
120 | dirc/baz |
|
120 | dirc/baz | |
121 | dirc/baz/largefile |
|
121 | dirc/baz/largefile | |
122 | dirc/dirb |
|
122 | dirc/dirb | |
123 | dirc/dirb/largefile |
|
123 | dirc/dirb/largefile | |
124 |
|
124 | |||
125 | $ hg clone -q . ../fetch |
|
125 | $ hg clone -q . ../fetch | |
126 | $ hg --config extensions.fetch= fetch ../fetch |
|
126 | $ hg --config extensions.fetch= fetch ../fetch | |
127 | abort: uncommitted changes |
|
127 | abort: uncommitted changes | |
128 | [20] |
|
128 | [20] | |
129 | $ hg up -qC |
|
129 | $ hg up -qC | |
130 | $ cd .. |
|
130 | $ cd .. | |
131 |
|
131 | |||
132 | Clone a local repository owned by another user |
|
132 | Clone a local repository owned by another user | |
133 | =================================================== |
|
133 | =================================================== | |
134 |
|
134 | |||
135 | #if unix-permissions |
|
135 | #if unix-permissions | |
136 |
|
136 | |||
137 | We have to simulate that here by setting $HOME and removing write permissions |
|
137 | We have to simulate that here by setting $HOME and removing write permissions | |
138 | $ ORIGHOME="$HOME" |
|
138 | $ ORIGHOME="$HOME" | |
139 | $ mkdir alice |
|
139 | $ mkdir alice | |
140 | $ HOME="`pwd`/alice" |
|
140 | $ HOME="`pwd`/alice" | |
141 | $ cd alice |
|
141 | $ cd alice | |
142 | $ hg init pubrepo |
|
142 | $ hg init pubrepo | |
143 | $ cd pubrepo |
|
143 | $ cd pubrepo | |
144 | $ dd if=/dev/zero bs=1k count=11k > a-large-file 2> /dev/null |
|
144 | $ dd if=/dev/zero bs=1k count=11k > a-large-file 2> /dev/null | |
145 | $ hg add --large a-large-file |
|
145 | $ hg add --large a-large-file | |
146 | $ hg commit -m "Add a large file" |
|
146 | $ hg commit -m "Add a large file" | |
147 | Invoking status precommit hook |
|
147 | Invoking status precommit hook | |
148 | A a-large-file |
|
148 | A a-large-file | |
149 | $ cd .. |
|
149 | $ cd .. | |
150 | $ chmod -R a-w pubrepo |
|
150 | $ chmod -R a-w pubrepo | |
151 | $ cd .. |
|
151 | $ cd .. | |
152 | $ mkdir bob |
|
152 | $ mkdir bob | |
153 | $ HOME="`pwd`/bob" |
|
153 | $ HOME="`pwd`/bob" | |
154 | $ cd bob |
|
154 | $ cd bob | |
155 | $ hg clone --pull ../alice/pubrepo pubrepo |
|
155 | $ hg clone --pull ../alice/pubrepo pubrepo | |
156 | requesting all changes |
|
156 | requesting all changes | |
157 | adding changesets |
|
157 | adding changesets | |
158 | adding manifests |
|
158 | adding manifests | |
159 | adding file changes |
|
159 | adding file changes | |
160 | added 1 changesets with 1 changes to 1 files |
|
160 | added 1 changesets with 1 changes to 1 files | |
161 | new changesets 09a186cfa6da (1 drafts) |
|
161 | new changesets 09a186cfa6da (1 drafts) | |
162 | updating to branch default |
|
162 | updating to branch default | |
163 | getting changed largefiles |
|
163 | getting changed largefiles | |
164 | 1 largefiles updated, 0 removed |
|
164 | 1 largefiles updated, 0 removed | |
165 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
165 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
166 | $ cd .. |
|
166 | $ cd .. | |
167 | $ chmod -R u+w alice/pubrepo |
|
167 | $ chmod -R u+w alice/pubrepo | |
168 | $ HOME="$ORIGHOME" |
|
168 | $ HOME="$ORIGHOME" | |
169 |
|
169 | |||
170 | #endif |
|
170 | #endif | |
171 |
|
171 | |||
172 |
|
172 | |||
173 | Symlink to a large largefile should behave the same as a symlink to a normal file |
|
173 | Symlink to a large largefile should behave the same as a symlink to a normal file | |
174 | ===================================================================================== |
|
174 | ===================================================================================== | |
175 |
|
175 | |||
176 | #if symlink |
|
176 | #if symlink | |
177 |
|
177 | |||
178 | $ hg init largesymlink |
|
178 | $ hg init largesymlink | |
179 | $ cd largesymlink |
|
179 | $ cd largesymlink | |
180 | $ dd if=/dev/zero bs=1k count=10k of=largefile 2>/dev/null |
|
180 | $ dd if=/dev/zero bs=1k count=10k of=largefile 2>/dev/null | |
181 | $ hg add --large largefile |
|
181 | $ hg add --large largefile | |
182 | $ hg commit -m "commit a large file" |
|
182 | $ hg commit -m "commit a large file" | |
183 | Invoking status precommit hook |
|
183 | Invoking status precommit hook | |
184 | A largefile |
|
184 | A largefile | |
185 | $ ln -s largefile largelink |
|
185 | $ ln -s largefile largelink | |
186 | $ hg add largelink |
|
186 | $ hg add largelink | |
187 | $ hg commit -m "commit a large symlink" |
|
187 | $ hg commit -m "commit a large symlink" | |
188 | Invoking status precommit hook |
|
188 | Invoking status precommit hook | |
189 | A largelink |
|
189 | A largelink | |
190 | $ rm -f largelink |
|
190 | $ rm -f largelink | |
191 | $ hg up >/dev/null |
|
191 | $ hg up >/dev/null | |
192 | $ test -f largelink |
|
192 | $ test -f largelink | |
193 | [1] |
|
193 | [1] | |
194 | $ test -L largelink |
|
194 | $ test -L largelink | |
195 | [1] |
|
195 | [1] | |
196 | $ rm -f largelink # make next part of the test independent of the previous |
|
196 | $ rm -f largelink # make next part of the test independent of the previous | |
197 | $ hg up -C >/dev/null |
|
197 | $ hg up -C >/dev/null | |
198 | $ test -f largelink |
|
198 | $ test -f largelink | |
199 | $ test -L largelink |
|
199 | $ test -L largelink | |
200 | $ cd .. |
|
200 | $ cd .. | |
201 |
|
201 | |||
202 | #endif |
|
202 | #endif | |
203 |
|
203 | |||
204 |
|
204 | |||
205 | test for pattern matching on 'hg status': |
|
205 | test for pattern matching on 'hg status': | |
206 | ============================================== |
|
206 | ============================================== | |
207 |
|
207 | |||
208 |
|
208 | |||
209 | to boost performance, largefiles checks whether the specified patterns are |
|
209 | to boost performance, largefiles checks whether the specified patterns are | |
210 | related to largefiles in the working directory (NOT to STANDINS). |
|
210 | related to largefiles in the working directory (NOT to STANDINS). | |
211 |
|
211 | |||
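The check above works because every largefile is shadowed by a small standin file under .hglf/ (the paths like .hglf/a/b/c/d/e.large.txt that appear in this test's output). A minimal sketch of that path mapping in plain Python, assuming only the .hglf/ prefix visible in the output, not the extension's actual helper functions:

    SHORTNAME = '.hglf'  # directory holding the standin files, as seen in this test's output

    def standin(path):
        # 'a/b/c/d/e.large.txt' -> '.hglf/a/b/c/d/e.large.txt'
        return SHORTNAME + '/' + path

    def splitstandin(path):
        # inverse mapping; returns None for paths that are not standins
        prefix = SHORTNAME + '/'
        return path[len(prefix):] if path.startswith(prefix) else None

    # a pattern that never touches standin-backed paths can skip the extra
    # largefile bookkeeping, which is the "performance boost" case below
    assert standin('a/b/c/d/e.large.txt') == '.hglf/a/b/c/d/e.large.txt'
    assert splitstandin('.hglf/a/b/c/d/e.large.txt') == 'a/b/c/d/e.large.txt'
    assert splitstandin('a/b/c/x/y.normal.txt') is None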
212 | $ hg init statusmatch |
|
212 | $ hg init statusmatch | |
213 | $ cd statusmatch |
|
213 | $ cd statusmatch | |
214 |
|
214 | |||
215 | $ mkdir -p a/b/c/d |
|
215 | $ mkdir -p a/b/c/d | |
216 | $ echo normal > a/b/c/d/e.normal.txt |
|
216 | $ echo normal > a/b/c/d/e.normal.txt | |
217 | $ hg add a/b/c/d/e.normal.txt |
|
217 | $ hg add a/b/c/d/e.normal.txt | |
218 | $ echo large > a/b/c/d/e.large.txt |
|
218 | $ echo large > a/b/c/d/e.large.txt | |
219 | $ hg add --large a/b/c/d/e.large.txt |
|
219 | $ hg add --large a/b/c/d/e.large.txt | |
220 | $ mkdir -p a/b/c/x |
|
220 | $ mkdir -p a/b/c/x | |
221 | $ echo normal > a/b/c/x/y.normal.txt |
|
221 | $ echo normal > a/b/c/x/y.normal.txt | |
222 | $ hg add a/b/c/x/y.normal.txt |
|
222 | $ hg add a/b/c/x/y.normal.txt | |
223 | $ hg commit -m 'add files' |
|
223 | $ hg commit -m 'add files' | |
224 | Invoking status precommit hook |
|
224 | Invoking status precommit hook | |
225 | A a/b/c/d/e.large.txt |
|
225 | A a/b/c/d/e.large.txt | |
226 | A a/b/c/d/e.normal.txt |
|
226 | A a/b/c/d/e.normal.txt | |
227 | A a/b/c/x/y.normal.txt |
|
227 | A a/b/c/x/y.normal.txt | |
228 |
|
228 | |||
229 | (1) no pattern: no performance boost |
|
229 | (1) no pattern: no performance boost | |
230 | $ hg status -A |
|
230 | $ hg status -A | |
231 | C a/b/c/d/e.large.txt |
|
231 | C a/b/c/d/e.large.txt | |
232 | C a/b/c/d/e.normal.txt |
|
232 | C a/b/c/d/e.normal.txt | |
233 | C a/b/c/x/y.normal.txt |
|
233 | C a/b/c/x/y.normal.txt | |
234 |
|
234 | |||
235 | (2) pattern not related to largefiles: performance boost |
|
235 | (2) pattern not related to largefiles: performance boost | |
236 | $ hg status -A a/b/c/x |
|
236 | $ hg status -A a/b/c/x | |
237 | C a/b/c/x/y.normal.txt |
|
237 | C a/b/c/x/y.normal.txt | |
238 |
|
238 | |||
239 | (3) pattern related to largefiles: no performance boost |
|
239 | (3) pattern related to largefiles: no performance boost | |
240 | $ hg status -A a/b/c/d |
|
240 | $ hg status -A a/b/c/d | |
241 | C a/b/c/d/e.large.txt |
|
241 | C a/b/c/d/e.large.txt | |
242 | C a/b/c/d/e.normal.txt |
|
242 | C a/b/c/d/e.normal.txt | |
243 |
|
243 | |||
244 | (4) pattern related to STANDIN (not to largefiles): performance boost |
|
244 | (4) pattern related to STANDIN (not to largefiles): performance boost | |
245 | $ hg status -A .hglf/a |
|
245 | $ hg status -A .hglf/a | |
246 | C .hglf/a/b/c/d/e.large.txt |
|
246 | C .hglf/a/b/c/d/e.large.txt | |
247 |
|
247 | |||
248 | (5) mixed case: no performance boost |
|
248 | (5) mixed case: no performance boost | |
249 | $ hg status -A a/b/c/x a/b/c/d |
|
249 | $ hg status -A a/b/c/x a/b/c/d | |
250 | C a/b/c/d/e.large.txt |
|
250 | C a/b/c/d/e.large.txt | |
251 | C a/b/c/d/e.normal.txt |
|
251 | C a/b/c/d/e.normal.txt | |
252 | C a/b/c/x/y.normal.txt |
|
252 | C a/b/c/x/y.normal.txt | |
253 |
|
253 | |||
254 | verify that largefiles doesn't break filesets |
|
254 | verify that largefiles doesn't break filesets | |
255 |
|
255 | |||
256 | $ hg log --rev . --exclude "set:binary()" |
|
256 | $ hg log --rev . --exclude "set:binary()" | |
257 | changeset: 0:41bd42f10efa |
|
257 | changeset: 0:41bd42f10efa | |
258 | tag: tip |
|
258 | tag: tip | |
259 | user: test |
|
259 | user: test | |
260 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
260 | date: Thu Jan 01 00:00:00 1970 +0000 | |
261 | summary: add files |
|
261 | summary: add files | |
262 |
|
262 | |||
263 | sharing a largefile repo automatically enables largefiles on the share |
|
263 | sharing a largefile repo automatically enables largefiles on the share | |
264 |
|
264 | |||
265 | $ hg share --config extensions.share= . ../shared_lfrepo |
|
265 | $ hg share --config extensions.share= . ../shared_lfrepo | |
266 | updating working directory |
|
266 | updating working directory | |
267 | getting changed largefiles |
|
267 | getting changed largefiles | |
268 | 1 largefiles updated, 0 removed |
|
268 | 1 largefiles updated, 0 removed | |
269 | 3 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
269 | 3 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
270 | $ hg debugrequires -R ../shared_lfrepo | grep largefiles |
|
270 | $ hg debugrequires -R ../shared_lfrepo | grep largefiles | |
271 | largefiles |
|
271 | largefiles | |
272 |
|
272 | |||
273 | verify that large files in subrepos are handled properly |
|
273 | verify that large files in subrepos are handled properly | |
274 | $ hg init subrepo |
|
274 | $ hg init subrepo | |
275 | $ echo "subrepo = subrepo" > .hgsub |
|
275 | $ echo "subrepo = subrepo" > .hgsub | |
276 | $ hg add .hgsub |
|
276 | $ hg add .hgsub | |
277 | $ hg ci -m "add subrepo" |
|
277 | $ hg ci -m "add subrepo" | |
278 | Invoking status precommit hook |
|
278 | Invoking status precommit hook | |
279 | A .hgsub |
|
279 | A .hgsub | |
280 | ? .hgsubstate |
|
280 | ? .hgsubstate | |
281 | $ echo "rev 1" > subrepo/large.txt |
|
281 | $ echo "rev 1" > subrepo/large.txt | |
282 | $ hg add --large subrepo/large.txt |
|
282 | $ hg add --large subrepo/large.txt | |
283 | $ hg sum |
|
283 | $ hg sum | |
284 | parent: 1:8ee150ea2e9c tip |
|
284 | parent: 1:8ee150ea2e9c tip | |
285 | add subrepo |
|
285 | add subrepo | |
286 | branch: default |
|
286 | branch: default | |
287 | commit: 1 subrepos |
|
287 | commit: 1 subrepos | |
288 | update: (current) |
|
288 | update: (current) | |
289 | phases: 2 draft |
|
289 | phases: 2 draft | |
290 | $ hg st |
|
290 | $ hg st | |
291 | $ hg st -S |
|
291 | $ hg st -S | |
292 | A subrepo/large.txt |
|
292 | A subrepo/large.txt | |
293 | $ hg ci -S -m "commit top repo" |
|
293 | $ hg ci -S -m "commit top repo" | |
294 | committing subrepository subrepo |
|
294 | committing subrepository subrepo | |
295 | Invoking status precommit hook |
|
295 | Invoking status precommit hook | |
296 | A large.txt |
|
296 | A large.txt | |
297 | Invoking status precommit hook |
|
297 | Invoking status precommit hook | |
298 | M .hgsubstate |
|
298 | M .hgsubstate | |
299 | # No differences |
|
299 | # No differences | |
300 | $ hg st -S |
|
300 | $ hg st -S | |
301 | $ hg sum |
|
301 | $ hg sum | |
302 | parent: 2:ce4cd0c527a6 tip |
|
302 | parent: 2:ce4cd0c527a6 tip | |
303 | commit top repo |
|
303 | commit top repo | |
304 | branch: default |
|
304 | branch: default | |
305 | commit: (clean) |
|
305 | commit: (clean) | |
306 | update: (current) |
|
306 | update: (current) | |
307 | phases: 3 draft |
|
307 | phases: 3 draft | |
308 | $ echo "rev 2" > subrepo/large.txt |
|
308 | $ echo "rev 2" > subrepo/large.txt | |
309 | $ hg st -S |
|
309 | $ hg st -S | |
310 | M subrepo/large.txt |
|
310 | M subrepo/large.txt | |
311 | $ hg sum |
|
311 | $ hg sum | |
312 | parent: 2:ce4cd0c527a6 tip |
|
312 | parent: 2:ce4cd0c527a6 tip | |
313 | commit top repo |
|
313 | commit top repo | |
314 | branch: default |
|
314 | branch: default | |
315 | commit: 1 subrepos |
|
315 | commit: 1 subrepos | |
316 | update: (current) |
|
316 | update: (current) | |
317 | phases: 3 draft |
|
317 | phases: 3 draft | |
318 | $ hg ci -m "this commit should fail without -S" |
|
318 | $ hg ci -m "this commit should fail without -S" | |
319 | abort: uncommitted changes in subrepository "subrepo" |
|
319 | abort: uncommitted changes in subrepository "subrepo" | |
320 | (use --subrepos for recursive commit) |
|
320 | (use --subrepos for recursive commit) | |
321 | [255] |
|
321 | [255] | |
322 |
|
322 | |||
323 | Add a normal file to the subrepo, then test archiving |
|
323 | Add a normal file to the subrepo, then test archiving | |
324 |
|
324 | |||
325 | $ echo 'normal file' > subrepo/normal.txt |
|
325 | $ echo 'normal file' > subrepo/normal.txt | |
326 | $ touch large.dat |
|
326 | $ touch large.dat | |
327 | $ mv subrepo/large.txt subrepo/renamed-large.txt |
|
327 | $ mv subrepo/large.txt subrepo/renamed-large.txt | |
328 | $ hg addremove -S --dry-run |
|
328 | $ hg addremove -S --dry-run | |
329 | adding large.dat as a largefile |
|
329 | adding large.dat as a largefile | |
330 | removing subrepo/large.txt |
|
330 | removing subrepo/large.txt | |
331 | adding subrepo/normal.txt |
|
331 | adding subrepo/normal.txt | |
332 | adding subrepo/renamed-large.txt |
|
332 | adding subrepo/renamed-large.txt | |
333 | $ hg status -S |
|
333 | $ hg status -S | |
334 | ! subrepo/large.txt |
|
334 | ! subrepo/large.txt | |
335 | ? large.dat |
|
335 | ? large.dat | |
336 | ? subrepo/normal.txt |
|
336 | ? subrepo/normal.txt | |
337 | ? subrepo/renamed-large.txt |
|
337 | ? subrepo/renamed-large.txt | |
338 |
|
338 | |||
339 | $ hg addremove --dry-run subrepo |
|
339 | $ hg addremove --dry-run subrepo | |
340 | removing subrepo/large.txt |
|
340 | removing subrepo/large.txt | |
341 | adding subrepo/normal.txt |
|
341 | adding subrepo/normal.txt | |
342 | adding subrepo/renamed-large.txt |
|
342 | adding subrepo/renamed-large.txt | |
343 | $ hg status -S |
|
343 | $ hg status -S | |
344 | ! subrepo/large.txt |
|
344 | ! subrepo/large.txt | |
345 | ? large.dat |
|
345 | ? large.dat | |
346 | ? subrepo/normal.txt |
|
346 | ? subrepo/normal.txt | |
347 | ? subrepo/renamed-large.txt |
|
347 | ? subrepo/renamed-large.txt | |
348 | $ cd .. |
|
348 | $ cd .. | |
349 |
|
349 | |||
350 | $ hg -R statusmatch addremove --dry-run statusmatch/subrepo |
|
350 | $ hg -R statusmatch addremove --dry-run statusmatch/subrepo | |
351 | removing statusmatch/subrepo/large.txt |
|
351 | removing statusmatch/subrepo/large.txt | |
352 | adding statusmatch/subrepo/normal.txt |
|
352 | adding statusmatch/subrepo/normal.txt | |
353 | adding statusmatch/subrepo/renamed-large.txt |
|
353 | adding statusmatch/subrepo/renamed-large.txt | |
354 | $ hg -R statusmatch status -S |
|
354 | $ hg -R statusmatch status -S | |
355 | ! subrepo/large.txt |
|
355 | ! subrepo/large.txt | |
356 | ? large.dat |
|
356 | ? large.dat | |
357 | ? subrepo/normal.txt |
|
357 | ? subrepo/normal.txt | |
358 | ? subrepo/renamed-large.txt |
|
358 | ? subrepo/renamed-large.txt | |
359 |
|
359 | |||
360 | $ hg -R statusmatch addremove --dry-run -S |
|
360 | $ hg -R statusmatch addremove --dry-run -S | |
361 | adding large.dat as a largefile |
|
361 | adding large.dat as a largefile | |
362 | removing subrepo/large.txt |
|
362 | removing subrepo/large.txt | |
363 | adding subrepo/normal.txt |
|
363 | adding subrepo/normal.txt | |
364 | adding subrepo/renamed-large.txt |
|
364 | adding subrepo/renamed-large.txt | |
365 | $ cd statusmatch |
|
365 | $ cd statusmatch | |
366 |
|
366 | |||
367 | $ mv subrepo/renamed-large.txt subrepo/large.txt |
|
367 | $ mv subrepo/renamed-large.txt subrepo/large.txt | |
368 | $ hg addremove subrepo |
|
368 | $ hg addremove subrepo | |
369 | adding subrepo/normal.txt |
|
369 | adding subrepo/normal.txt | |
370 | $ hg forget subrepo/normal.txt |
|
370 | $ hg forget subrepo/normal.txt | |
371 |
|
371 | |||
372 | $ hg addremove -S |
|
372 | $ hg addremove -S | |
373 | adding large.dat as a largefile |
|
373 | adding large.dat as a largefile | |
374 | adding subrepo/normal.txt |
|
374 | adding subrepo/normal.txt | |
375 | $ rm large.dat |
|
375 | $ rm large.dat | |
376 |
|
376 | |||
377 | $ hg addremove subrepo |
|
377 | $ hg addremove subrepo | |
378 | $ hg addremove -S |
|
378 | $ hg addremove -S | |
379 | removing large.dat |
|
379 | removing large.dat | |
380 |
|
380 | |||
381 | Lock in subrepo, otherwise the change isn't archived |
|
381 | Lock in subrepo, otherwise the change isn't archived | |
382 |
|
382 | |||
383 | $ hg ci -S -m "add normal file to top level" |
|
383 | $ hg ci -S -m "add normal file to top level" | |
384 | committing subrepository subrepo |
|
384 | committing subrepository subrepo | |
385 | Invoking status precommit hook |
|
385 | Invoking status precommit hook | |
386 | M large.txt |
|
386 | M large.txt | |
387 | A normal.txt |
|
387 | A normal.txt | |
388 | Invoking status precommit hook |
|
388 | Invoking status precommit hook | |
389 | M .hgsubstate |
|
389 | M .hgsubstate | |
390 | $ hg archive -S ../lf_subrepo_archive |
|
390 | $ hg archive -S ../lf_subrepo_archive | |
391 | $ find ../lf_subrepo_archive | sort |
|
391 | $ find ../lf_subrepo_archive | sort | |
392 | ../lf_subrepo_archive |
|
392 | ../lf_subrepo_archive | |
393 | ../lf_subrepo_archive/.hg_archival.txt |
|
393 | ../lf_subrepo_archive/.hg_archival.txt | |
394 | ../lf_subrepo_archive/.hgsub |
|
394 | ../lf_subrepo_archive/.hgsub | |
395 | ../lf_subrepo_archive/.hgsubstate |
|
395 | ../lf_subrepo_archive/.hgsubstate | |
396 | ../lf_subrepo_archive/a |
|
396 | ../lf_subrepo_archive/a | |
397 | ../lf_subrepo_archive/a/b |
|
397 | ../lf_subrepo_archive/a/b | |
398 | ../lf_subrepo_archive/a/b/c |
|
398 | ../lf_subrepo_archive/a/b/c | |
399 | ../lf_subrepo_archive/a/b/c/d |
|
399 | ../lf_subrepo_archive/a/b/c/d | |
400 | ../lf_subrepo_archive/a/b/c/d/e.large.txt |
|
400 | ../lf_subrepo_archive/a/b/c/d/e.large.txt | |
401 | ../lf_subrepo_archive/a/b/c/d/e.normal.txt |
|
401 | ../lf_subrepo_archive/a/b/c/d/e.normal.txt | |
402 | ../lf_subrepo_archive/a/b/c/x |
|
402 | ../lf_subrepo_archive/a/b/c/x | |
403 | ../lf_subrepo_archive/a/b/c/x/y.normal.txt |
|
403 | ../lf_subrepo_archive/a/b/c/x/y.normal.txt | |
404 | ../lf_subrepo_archive/subrepo |
|
404 | ../lf_subrepo_archive/subrepo | |
405 | ../lf_subrepo_archive/subrepo/large.txt |
|
405 | ../lf_subrepo_archive/subrepo/large.txt | |
406 | ../lf_subrepo_archive/subrepo/normal.txt |
|
406 | ../lf_subrepo_archive/subrepo/normal.txt | |
407 | $ cat ../lf_subrepo_archive/.hg_archival.txt |
|
407 | $ cat ../lf_subrepo_archive/.hg_archival.txt | |
408 | repo: 41bd42f10efa43698cc02052ea0977771cba506d |
|
408 | repo: 41bd42f10efa43698cc02052ea0977771cba506d | |
409 | node: d56a95e6522858bc08a724c4fe2bdee066d1c30b |
|
409 | node: d56a95e6522858bc08a724c4fe2bdee066d1c30b | |
410 | branch: default |
|
410 | branch: default | |
411 | latesttag: null |
|
411 | latesttag: null | |
412 | latesttagdistance: 4 |
|
412 | latesttagdistance: 4 | |
413 | changessincelatesttag: 4 |
|
413 | changessincelatesttag: 4 | |
414 |
|
414 | |||
415 | Test update with subrepos. |
|
415 | Test update with subrepos. | |
416 |
|
416 | |||
417 | $ hg update 0 |
|
417 | $ hg update 0 | |
418 | getting changed largefiles |
|
418 | getting changed largefiles | |
419 | 0 largefiles updated, 1 removed |
|
419 | 0 largefiles updated, 1 removed | |
420 | 0 files updated, 0 files merged, 2 files removed, 0 files unresolved |
|
420 | 0 files updated, 0 files merged, 2 files removed, 0 files unresolved | |
421 | $ hg status -S |
|
421 | $ hg status -S | |
422 | $ hg update tip |
|
422 | $ hg update tip | |
423 | getting changed largefiles |
|
423 | getting changed largefiles | |
424 | 1 largefiles updated, 0 removed |
|
424 | 1 largefiles updated, 0 removed | |
425 | 2 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
425 | 2 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
426 | $ hg status -S |
|
426 | $ hg status -S | |
427 | # modify a large file |
|
427 | # modify a large file | |
428 | $ echo "modified" > subrepo/large.txt |
|
428 | $ echo "modified" > subrepo/large.txt | |
429 | $ hg st -S |
|
429 | $ hg st -S | |
430 | M subrepo/large.txt |
|
430 | M subrepo/large.txt | |
431 | # update -C should revert the change. |
|
431 | # update -C should revert the change. | |
432 | $ hg update -C |
|
432 | $ hg update -C | |
433 | getting changed largefiles |
|
433 | getting changed largefiles | |
434 | 1 largefiles updated, 0 removed |
|
434 | 1 largefiles updated, 0 removed | |
435 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
435 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
436 | $ hg status -S |
|
436 | $ hg status -S | |
437 |
|
437 | |||
438 | Forget doesn't change the content of the file |
|
438 | Forget doesn't change the content of the file | |
439 | $ echo 'pre-forget content' > subrepo/large.txt |
|
439 | $ echo 'pre-forget content' > subrepo/large.txt | |
440 | $ hg forget -v subrepo/large.txt |
|
440 | $ hg forget -v subrepo/large.txt | |
441 | removing subrepo/large.txt |
|
441 | removing subrepo/large.txt | |
442 | $ cat subrepo/large.txt |
|
442 | $ cat subrepo/large.txt | |
443 | pre-forget content |
|
443 | pre-forget content | |
444 |
|
444 | |||
445 | Test reverting a forgotten file |
|
445 | Test reverting a forgotten file | |
446 | $ hg revert -R subrepo subrepo/large.txt |
|
446 | $ hg revert -R subrepo subrepo/large.txt | |
447 | $ hg status -SA subrepo/large.txt |
|
447 | $ hg status -SA subrepo/large.txt | |
448 | C subrepo/large.txt |
|
448 | C subrepo/large.txt | |
449 |
|
449 | |||
450 | $ hg rm -v subrepo/large.txt |
|
450 | $ hg rm -v subrepo/large.txt | |
451 | removing subrepo/large.txt |
|
451 | removing subrepo/large.txt | |
452 | $ hg revert -R subrepo subrepo/large.txt |
|
452 | $ hg revert -R subrepo subrepo/large.txt | |
453 | $ rm subrepo/large.txt |
|
453 | $ rm subrepo/large.txt | |
454 | $ hg addremove -S |
|
454 | $ hg addremove -S | |
455 | removing subrepo/large.txt |
|
455 | removing subrepo/large.txt | |
456 | $ hg st -S |
|
456 | $ hg st -S | |
457 | R subrepo/large.txt |
|
457 | R subrepo/large.txt | |
458 |
|
458 | |||
459 | Test archiving a revision that references a subrepo that is not yet |
|
459 | Test archiving a revision that references a subrepo that is not yet | |
460 | cloned (see test-subrepo-recursion.t): |
|
460 | cloned (see test-subrepo-recursion.t): | |
461 |
|
461 | |||
462 | $ hg clone -U . ../empty |
|
462 | $ hg clone -U . ../empty | |
463 | $ cd ../empty |
|
463 | $ cd ../empty | |
464 | $ hg archive --subrepos -r tip ../archive.tar.gz |
|
464 | $ hg archive --subrepos -r tip ../archive.tar.gz | |
465 | cloning subrepo subrepo from $TESTTMP/statusmatch/subrepo |
|
465 | cloning subrepo subrepo from $TESTTMP/statusmatch/subrepo | |
466 | $ cd .. |
|
466 | $ cd .. | |
467 |
|
467 | |||
468 |
|
468 | |||
469 |
|
469 | |||
470 |
|
470 | |||
471 |
|
471 | |||
472 |
|
472 | |||
473 | Test addremove, forget and others |
|
473 | Test addremove, forget and others | |
474 | ============================================== |
|
474 | ============================================== | |
475 |
|
475 | |||
476 | Test that addremove picks up largefiles prior to the initial commit (issue3541) |
|
476 | Test that addremove picks up largefiles prior to the initial commit (issue3541) | |
477 |
|
477 | |||
478 | $ hg init addrm2 |
|
478 | $ hg init addrm2 | |
479 | $ cd addrm2 |
|
479 | $ cd addrm2 | |
480 | $ touch large.dat |
|
480 | $ touch large.dat | |
481 | $ touch large2.dat |
|
481 | $ touch large2.dat | |
482 | $ touch normal |
|
482 | $ touch normal | |
483 | $ hg add --large large.dat |
|
483 | $ hg add --large large.dat | |
484 | $ hg addremove -v |
|
484 | $ hg addremove -v | |
485 | adding large2.dat as a largefile |
|
485 | adding large2.dat as a largefile | |
486 | adding normal |
|
486 | adding normal | |
487 |
|
487 | |||
488 | Test that forgetting all largefiles reverts to islfilesrepo() == False |
|
488 | Test that forgetting all largefiles reverts to islfilesrepo() == False | |
489 | (addremove will add *.dat as normal files now) |
|
489 | (addremove will add *.dat as normal files now) | |
490 | $ hg forget large.dat |
|
490 | $ hg forget large.dat | |
491 | $ hg forget large2.dat |
|
491 | $ hg forget large2.dat | |
492 | $ hg addremove -v |
|
492 | $ hg addremove -v | |
493 | adding large.dat |
|
493 | adding large.dat | |
494 | adding large2.dat |
|
494 | adding large2.dat | |
495 |
|
495 | |||
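Whether addremove treats a file as a largefile above is driven by configuration rather than by the file's actual size (the .dat files here are empty). A sketch of the kind of settings involved, using the standard largefiles options but with illustrative values that are an assumption, not necessarily what this test configured earlier:

    [largefiles]
    # add files matching these patterns as largefiles
    patterns = glob:**.dat
    # also add any file of at least this many megabytes as a largefile
    minsize = 2

As the comment above notes, once all largefiles have been forgotten and islfilesrepo() is False, the same addremove call adds the .dat files as normal files instead.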
496 | Test commit's addremove option prior to the first commit |
|
496 | Test commit's addremove option prior to the first commit | |
497 | $ hg forget large.dat |
|
497 | $ hg forget large.dat | |
498 | $ hg forget large2.dat |
|
498 | $ hg forget large2.dat | |
499 | $ hg add --large large.dat |
|
499 | $ hg add --large large.dat | |
500 | $ hg ci -Am "commit" |
|
500 | $ hg ci -Am "commit" | |
501 | adding large2.dat as a largefile |
|
501 | adding large2.dat as a largefile | |
502 | Invoking status precommit hook |
|
502 | Invoking status precommit hook | |
503 | A large.dat |
|
503 | A large.dat | |
504 | A large2.dat |
|
504 | A large2.dat | |
505 | A normal |
|
505 | A normal | |
506 | $ find .hglf | sort |
|
506 | $ find .hglf | sort | |
507 | .hglf |
|
507 | .hglf | |
508 | .hglf/large.dat |
|
508 | .hglf/large.dat | |
509 | .hglf/large2.dat |
|
509 | .hglf/large2.dat | |
510 |
|
510 | |||
511 | Test actions on largefiles using relative paths from subdir |
|
511 | Test actions on largefiles using relative paths from subdir | |
512 |
|
512 | |||
513 | $ mkdir sub |
|
513 | $ mkdir sub | |
514 | $ cd sub |
|
514 | $ cd sub | |
515 | $ echo anotherlarge > anotherlarge |
|
515 | $ echo anotherlarge > anotherlarge | |
516 | $ hg add --large anotherlarge |
|
516 | $ hg add --large anotherlarge | |
517 | $ hg st |
|
517 | $ hg st | |
518 | A sub/anotherlarge |
|
518 | A sub/anotherlarge | |
519 | $ hg st anotherlarge |
|
519 | $ hg st anotherlarge | |
520 | A anotherlarge |
|
520 | A anotherlarge | |
521 | $ hg commit -m anotherlarge anotherlarge |
|
521 | $ hg commit -m anotherlarge anotherlarge | |
522 | Invoking status precommit hook |
|
522 | Invoking status precommit hook | |
523 | A sub/anotherlarge |
|
523 | A sub/anotherlarge | |
524 | $ hg log anotherlarge |
|
524 | $ hg log anotherlarge | |
525 | changeset: 1:9627a577c5e9 |
|
525 | changeset: 1:9627a577c5e9 | |
526 | tag: tip |
|
526 | tag: tip | |
527 | user: test |
|
527 | user: test | |
528 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
528 | date: Thu Jan 01 00:00:00 1970 +0000 | |
529 | summary: anotherlarge |
|
529 | summary: anotherlarge | |
530 |
|
530 | |||
531 | $ hg --debug log -T '{rev}: {desc}\n' ../sub/anotherlarge |
|
531 | $ hg --debug log -T '{rev}: {desc}\n' ../sub/anotherlarge | |
532 | updated patterns: ../.hglf/sub/../sub/anotherlarge, ../sub/anotherlarge |
|
532 | updated patterns: ../.hglf/sub/../sub/anotherlarge, ../sub/anotherlarge | |
533 | 1: anotherlarge |
|
533 | 1: anotherlarge | |
534 |
|
534 | |||
535 | $ hg log -G anotherlarge |
|
535 | $ hg log -G anotherlarge | |
536 | @ changeset: 1:9627a577c5e9 |
|
536 | @ changeset: 1:9627a577c5e9 | |
537 | | tag: tip |
|
537 | | tag: tip | |
538 | ~ user: test |
|
538 | ~ user: test | |
539 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
539 | date: Thu Jan 01 00:00:00 1970 +0000 | |
540 | summary: anotherlarge |
|
540 | summary: anotherlarge | |
541 |
|
541 | |||
542 |
|
542 | |||
543 | $ hg log glob:another* |
|
543 | $ hg log glob:another* | |
544 | changeset: 1:9627a577c5e9 |
|
544 | changeset: 1:9627a577c5e9 | |
545 | tag: tip |
|
545 | tag: tip | |
546 | user: test |
|
546 | user: test | |
547 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
547 | date: Thu Jan 01 00:00:00 1970 +0000 | |
548 | summary: anotherlarge |
|
548 | summary: anotherlarge | |
549 |
|
549 | |||
550 | $ hg --debug log -T '{rev}: {desc}\n' -G glob:another* |
|
550 | $ hg --debug log -T '{rev}: {desc}\n' -G glob:another* | |
551 | updated patterns: glob:../.hglf/sub/another*, glob:another* |
|
551 | updated patterns: glob:../.hglf/sub/another*, glob:another* | |
552 | @ 1: anotherlarge |
|
552 | @ 1: anotherlarge | |
553 | | |
|
553 | | | |
554 | ~ |
|
554 | ~ | |
555 |
|
555 | |||
556 | #if no-msys |
|
556 | #if no-msys | |
557 | $ hg --debug log -T '{rev}: {desc}\n' 'glob:../.hglf/sub/another*' # no-msys |
|
557 | $ hg --debug log -T '{rev}: {desc}\n' 'glob:../.hglf/sub/another*' # no-msys | |
558 | updated patterns: glob:../.hglf/sub/another* |
|
558 | updated patterns: glob:../.hglf/sub/another* | |
559 | 1: anotherlarge |
|
559 | 1: anotherlarge | |
560 |
|
560 | |||
561 | $ hg --debug log -G -T '{rev}: {desc}\n' 'glob:../.hglf/sub/another*' # no-msys |
|
561 | $ hg --debug log -G -T '{rev}: {desc}\n' 'glob:../.hglf/sub/another*' # no-msys | |
562 | updated patterns: glob:../.hglf/sub/another* |
|
562 | updated patterns: glob:../.hglf/sub/another* | |
563 | @ 1: anotherlarge |
|
563 | @ 1: anotherlarge | |
564 | | |
|
564 | | | |
565 | ~ |
|
565 | ~ | |
566 | #endif |
|
566 | #endif | |
567 |
|
567 | |||
568 | $ echo more >> anotherlarge |
|
568 | $ echo more >> anotherlarge | |
569 | $ hg st . |
|
569 | $ hg st . | |
570 | M anotherlarge |
|
570 | M anotherlarge | |
571 | $ hg cat anotherlarge |
|
571 | $ hg cat anotherlarge | |
572 | anotherlarge |
|
572 | anotherlarge | |
573 | $ hg revert anotherlarge |
|
573 | $ hg revert anotherlarge | |
574 | $ hg st |
|
574 | $ hg st | |
575 | ? sub/anotherlarge.orig |
|
575 | ? sub/anotherlarge.orig | |
576 |
|
576 | |||
577 | Test orig files go where we want them |
|
577 | Test orig files go where we want them | |
578 | $ echo moremore >> anotherlarge |
|
578 | $ echo moremore >> anotherlarge | |
579 | $ hg revert anotherlarge -v --config 'ui.origbackuppath=.hg/origbackups' |
|
579 | $ hg revert anotherlarge -v --config 'ui.origbackuppath=.hg/origbackups' | |
580 | creating directory: $TESTTMP/addrm2/.hg/origbackups/.hglf/sub |
|
580 | creating directory: $TESTTMP/addrm2/.hg/origbackups/.hglf/sub | |
581 | saving current version of ../.hglf/sub/anotherlarge as ../.hg/origbackups/.hglf/sub/anotherlarge |
|
581 | saving current version of ../.hglf/sub/anotherlarge as ../.hg/origbackups/.hglf/sub/anotherlarge | |
582 | reverting ../.hglf/sub/anotherlarge |
|
582 | reverting ../.hglf/sub/anotherlarge | |
583 | creating directory: $TESTTMP/addrm2/.hg/origbackups/sub |
|
583 | creating directory: $TESTTMP/addrm2/.hg/origbackups/sub | |
584 | found 90c622cf65cebe75c5842f9136c459333faf392e in store |
|
584 | found 90c622cf65cebe75c5842f9136c459333faf392e in store | |
585 | found 90c622cf65cebe75c5842f9136c459333faf392e in store |
|
585 | found 90c622cf65cebe75c5842f9136c459333faf392e in store | |
586 | $ ls ../.hg/origbackups/sub |
|
586 | $ ls ../.hg/origbackups/sub | |
587 | anotherlarge |
|
587 | anotherlarge | |
588 | $ cd .. |
|
588 | $ cd .. | |
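The backup location above comes from the one-off --config override; the same ui.origbackuppath option can also be set persistently in an hgrc (a minimal sketch using the option exactly as it was passed on the command line above):

    [ui]
    # keep .orig backup files out of the working copy
    origbackuppath = .hg/origbackups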
589 |
|
589 | |||
590 | Test glob logging from the root dir |
|
590 | Test glob logging from the root dir | |
591 | $ hg log glob:**another* |
|
591 | $ hg log glob:**another* | |
592 | changeset: 1:9627a577c5e9 |
|
592 | changeset: 1:9627a577c5e9 | |
593 | tag: tip |
|
593 | tag: tip | |
594 | user: test |
|
594 | user: test | |
595 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
595 | date: Thu Jan 01 00:00:00 1970 +0000 | |
596 | summary: anotherlarge |
|
596 | summary: anotherlarge | |
597 |
|
597 | |||
598 | $ hg log -G glob:**another* |
|
598 | $ hg log -G glob:**another* | |
599 | @ changeset: 1:9627a577c5e9 |
|
599 | @ changeset: 1:9627a577c5e9 | |
600 | | tag: tip |
|
600 | | tag: tip | |
601 | ~ user: test |
|
601 | ~ user: test | |
602 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
602 | date: Thu Jan 01 00:00:00 1970 +0000 | |
603 | summary: anotherlarge |
|
603 | summary: anotherlarge | |
604 |
|
604 | |||
605 |
|
605 | |||
606 | $ cd .. |
|
606 | $ cd .. | |
607 |
|
607 | |||
608 | Log from outer space |
|
608 | Log from outer space | |
609 | $ hg --debug log -R addrm2 -T '{rev}: {desc}\n' 'addrm2/sub/anotherlarge' |
|
609 | $ hg --debug log -R addrm2 -T '{rev}: {desc}\n' 'addrm2/sub/anotherlarge' | |
610 | updated patterns: addrm2/.hglf/sub/anotherlarge, addrm2/sub/anotherlarge |
|
610 | updated patterns: addrm2/.hglf/sub/anotherlarge, addrm2/sub/anotherlarge | |
611 | 1: anotherlarge |
|
611 | 1: anotherlarge | |
612 | $ hg --debug log -R addrm2 -T '{rev}: {desc}\n' 'addrm2/.hglf/sub/anotherlarge' |
|
612 | $ hg --debug log -R addrm2 -T '{rev}: {desc}\n' 'addrm2/.hglf/sub/anotherlarge' | |
613 | updated patterns: addrm2/.hglf/sub/anotherlarge |
|
613 | updated patterns: addrm2/.hglf/sub/anotherlarge | |
614 | 1: anotherlarge |
|
614 | 1: anotherlarge | |
615 |
|
615 | |||
616 |
|
616 | |||
617 | Check error message while exchange |
|
617 | Check error message while exchange | |
618 | ========================================================= |
|
618 | ========================================================= | |
619 |
|
619 | |||
620 | issue3651: summary/outgoing with largefiles shows "no remote repo" |
|
620 | issue3651: summary/outgoing with largefiles shows "no remote repo" | |
621 | unexpectedly |
|
621 | unexpectedly | |
622 |
|
622 | |||
623 | $ mkdir issue3651 |
|
623 | $ mkdir issue3651 | |
624 | $ cd issue3651 |
|
624 | $ cd issue3651 | |
625 |
|
625 | |||
626 | $ hg init src |
|
626 | $ hg init src | |
627 | $ echo a > src/a |
|
627 | $ echo a > src/a | |
628 | $ hg -R src add --large src/a |
|
628 | $ hg -R src add --large src/a | |
629 | $ hg -R src commit -m '#0' |
|
629 | $ hg -R src commit -m '#0' | |
630 | Invoking status precommit hook |
|
630 | Invoking status precommit hook | |
631 | A a |
|
631 | A a | |
632 |
|
632 | |||
633 | check messages when no remote repository is specified: |
|
633 | check messages when no remote repository is specified: | |
634 | "no remote repo" route for "hg outgoing --large" is not tested here, |
|
634 | "no remote repo" route for "hg outgoing --large" is not tested here, | |
635 | because it can't be reproduced easily. |
|
635 | because it can't be reproduced easily. | |
636 |
|
636 | |||
637 | $ hg init clone1 |
|
637 | $ hg init clone1 | |
638 | $ hg -R clone1 -q pull src |
|
638 | $ hg -R clone1 -q pull src | |
639 | $ hg -R clone1 -q update |
|
639 | $ hg -R clone1 -q update | |
640 | $ hg -R clone1 paths | grep default |
|
640 | $ hg -R clone1 paths | grep default | |
641 | [1] |
|
641 | [1] | |
642 |
|
642 | |||
643 | $ hg -R clone1 summary --large |
|
643 | $ hg -R clone1 summary --large | |
644 | parent: 0:fc0bd45326d3 tip |
|
644 | parent: 0:fc0bd45326d3 tip | |
645 | #0 |
|
645 | #0 | |
646 | branch: default |
|
646 | branch: default | |
647 | commit: (clean) |
|
647 | commit: (clean) | |
648 | update: (current) |
|
648 | update: (current) | |
649 | phases: 1 draft |
|
649 | phases: 1 draft | |
650 | largefiles: (no remote repo) |
|
650 | largefiles: (no remote repo) | |
651 |
|
651 | |||
652 | check messages when there are no files to upload: |
|
652 | check messages when there are no files to upload: | |
653 |
|
653 | |||
654 | $ hg -q clone src clone2 |
|
654 | $ hg -q clone src clone2 | |
655 | $ hg -R clone2 paths | grep default |
|
655 | $ hg -R clone2 paths | grep default | |
656 | default = $TESTTMP/issue3651/src |
|
656 | default = $TESTTMP/issue3651/src | |
657 |
|
657 | |||
658 | $ hg -R clone2 summary --large |
|
658 | $ hg -R clone2 summary --large | |
659 | parent: 0:fc0bd45326d3 tip |
|
659 | parent: 0:fc0bd45326d3 tip | |
660 | #0 |
|
660 | #0 | |
661 | branch: default |
|
661 | branch: default | |
662 | commit: (clean) |
|
662 | commit: (clean) | |
663 | update: (current) |
|
663 | update: (current) | |
664 | phases: 1 draft |
|
664 | phases: 1 draft | |
665 | largefiles: (no files to upload) |
|
665 | largefiles: (no files to upload) | |
666 | $ hg -R clone2 outgoing --large |
|
666 | $ hg -R clone2 outgoing --large | |
667 | comparing with $TESTTMP/issue3651/src |
|
667 | comparing with $TESTTMP/issue3651/src | |
668 | searching for changes |
|
668 | searching for changes | |
669 | no changes found |
|
669 | no changes found | |
670 | largefiles: no files to upload |
|
670 | largefiles: no files to upload | |
671 | [1] |
|
671 | [1] | |
672 |
|
672 | |||
673 | $ hg -R clone2 outgoing --large --graph --template "{rev}" |
|
673 | $ hg -R clone2 outgoing --large --graph --template "{rev}" | |
674 | comparing with $TESTTMP/issue3651/src |
|
674 | comparing with $TESTTMP/issue3651/src | |
675 | searching for changes |
|
675 | searching for changes | |
676 | no changes found |
|
676 | no changes found | |
677 | largefiles: no files to upload |
|
677 | largefiles: no files to upload | |
678 | [1] |
|
678 | [1] | |
679 |
|
679 | |||
680 | check messages when there are files to upload: |
|
680 | check messages when there are files to upload: | |
681 |
|
681 | |||
682 | $ echo b > clone2/b |
|
682 | $ echo b > clone2/b | |
683 | $ hg -R clone2 add --large clone2/b |
|
683 | $ hg -R clone2 add --large clone2/b | |
684 | $ hg -R clone2 commit -m '#1' |
|
684 | $ hg -R clone2 commit -m '#1' | |
685 | Invoking status precommit hook |
|
685 | Invoking status precommit hook | |
686 | A b |
|
686 | A b | |
687 | $ hg -R clone2 summary --large |
|
687 | $ hg -R clone2 summary --large | |
688 | parent: 1:1acbe71ce432 tip |
|
688 | parent: 1:1acbe71ce432 tip | |
689 | #1 |
|
689 | #1 | |
690 | branch: default |
|
690 | branch: default | |
691 | commit: (clean) |
|
691 | commit: (clean) | |
692 | update: (current) |
|
692 | update: (current) | |
693 | phases: 2 draft |
|
693 | phases: 2 draft | |
694 | largefiles: 1 entities for 1 files to upload |
|
694 | largefiles: 1 entities for 1 files to upload | |
695 | $ hg -R clone2 outgoing --large |
|
695 | $ hg -R clone2 outgoing --large | |
696 | comparing with $TESTTMP/issue3651/src |
|
696 | comparing with $TESTTMP/issue3651/src | |
697 | searching for changes |
|
697 | searching for changes | |
698 | changeset: 1:1acbe71ce432 |
|
698 | changeset: 1:1acbe71ce432 | |
699 | tag: tip |
|
699 | tag: tip | |
700 | user: test |
|
700 | user: test | |
701 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
701 | date: Thu Jan 01 00:00:00 1970 +0000 | |
702 | summary: #1 |
|
702 | summary: #1 | |
703 |
|
703 | |||
704 | largefiles to upload (1 entities): |
|
704 | largefiles to upload (1 entities): | |
705 | b |
|
705 | b | |
706 |
|
706 | |||
707 | $ hg -R clone2 outgoing --large --graph --template "{rev}" |
|
707 | $ hg -R clone2 outgoing --large --graph --template "{rev}" | |
708 | comparing with $TESTTMP/issue3651/src |
|
708 | comparing with $TESTTMP/issue3651/src | |
709 | searching for changes |
|
709 | searching for changes | |
710 | @ 1 |
|
710 | @ 1 | |
711 |
|
711 | |||
712 | largefiles to upload (1 entities): |
|
712 | largefiles to upload (1 entities): | |
713 | b |
|
713 | b | |
714 |
|
714 | |||
715 |
|
715 | |||
716 | $ cp clone2/b clone2/b1 |
|
716 | $ cp clone2/b clone2/b1 | |
717 | $ cp clone2/b clone2/b2 |
|
717 | $ cp clone2/b clone2/b2 | |
718 | $ hg -R clone2 add --large clone2/b1 clone2/b2 |
|
718 | $ hg -R clone2 add --large clone2/b1 clone2/b2 | |
719 | $ hg -R clone2 commit -m '#2: add largefiles referring same entity' |
|
719 | $ hg -R clone2 commit -m '#2: add largefiles referring same entity' | |
720 | Invoking status precommit hook |
|
720 | Invoking status precommit hook | |
721 | A b1 |
|
721 | A b1 | |
722 | A b2 |
|
722 | A b2 | |
723 | $ hg -R clone2 summary --large |
|
723 | $ hg -R clone2 summary --large | |
724 | parent: 2:6095d0695d70 tip |
|
724 | parent: 2:6095d0695d70 tip | |
725 | #2: add largefiles referring same entity |
|
725 | #2: add largefiles referring same entity | |
726 | branch: default |
|
726 | branch: default | |
727 | commit: (clean) |
|
727 | commit: (clean) | |
728 | update: (current) |
|
728 | update: (current) | |
729 | phases: 3 draft |
|
729 | phases: 3 draft | |
730 | largefiles: 1 entities for 3 files to upload |
|
730 | largefiles: 1 entities for 3 files to upload | |
731 | $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" |
|
731 | $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" | |
732 | comparing with $TESTTMP/issue3651/src |
|
732 | comparing with $TESTTMP/issue3651/src | |
733 | searching for changes |
|
733 | searching for changes | |
734 | 1:1acbe71ce432 |
|
734 | 1:1acbe71ce432 | |
735 | 2:6095d0695d70 |
|
735 | 2:6095d0695d70 | |
736 | largefiles to upload (1 entities): |
|
736 | largefiles to upload (1 entities): | |
737 | b |
|
737 | b | |
738 | b1 |
|
738 | b1 | |
739 | b2 |
|
739 | b2 | |
740 |
|
740 | |||
741 | $ hg -R clone2 cat -r 1 clone2/.hglf/b |
|
741 | $ hg -R clone2 cat -r 1 clone2/.hglf/b | |
742 | 89e6c98d92887913cadf06b2adb97f26cde4849b |
|
742 | 89e6c98d92887913cadf06b2adb97f26cde4849b | |
743 | $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug --config progress.debug=true |
|
743 | $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug --config progress.debug=true | |
744 | comparing with $TESTTMP/issue3651/src |
|
744 | comparing with $TESTTMP/issue3651/src | |
745 | query 1; heads |
|
745 | query 1; heads | |
746 | searching for changes |
|
746 | searching for changes | |
747 | all remote heads known locally |
|
747 | all remote heads known locally | |
748 | 1:1acbe71ce432 |
|
748 | 1:1acbe71ce432 | |
749 | 2:6095d0695d70 |
|
749 | 2:6095d0695d70 | |
750 | finding outgoing largefiles: 0/2 revisions (0.00%) |
|
750 | finding outgoing largefiles: 0/2 revisions (0.00%) | |
751 | finding outgoing largefiles: 1/2 revisions (50.00%) |
|
751 | finding outgoing largefiles: 1/2 revisions (50.00%) | |
752 | largefiles to upload (1 entities): |
|
752 | largefiles to upload (1 entities): | |
753 | b |
|
753 | b | |
754 | 89e6c98d92887913cadf06b2adb97f26cde4849b |
|
754 | 89e6c98d92887913cadf06b2adb97f26cde4849b | |
755 | b1 |
|
755 | b1 | |
756 | 89e6c98d92887913cadf06b2adb97f26cde4849b |
|
756 | 89e6c98d92887913cadf06b2adb97f26cde4849b | |
757 | b2 |
|
757 | b2 | |
758 | 89e6c98d92887913cadf06b2adb97f26cde4849b |
|
758 | 89e6c98d92887913cadf06b2adb97f26cde4849b | |
759 |
|
759 | |||
760 |
|
760 | |||
761 | $ echo bbb > clone2/b |
|
761 | $ echo bbb > clone2/b | |
762 | $ hg -R clone2 commit -m '#3: add new largefile entity as existing file' |
|
762 | $ hg -R clone2 commit -m '#3: add new largefile entity as existing file' | |
763 | Invoking status precommit hook |
|
763 | Invoking status precommit hook | |
764 | M b |
|
764 | M b | |
765 | $ echo bbbb > clone2/b |
|
765 | $ echo bbbb > clone2/b | |
766 | $ hg -R clone2 commit -m '#4: add new largefile entity as existing file' |
|
766 | $ hg -R clone2 commit -m '#4: add new largefile entity as existing file' | |
767 | Invoking status precommit hook |
|
767 | Invoking status precommit hook | |
768 | M b |
|
768 | M b | |
769 | $ cp clone2/b1 clone2/b |
|
769 | $ cp clone2/b1 clone2/b | |
770 | $ hg -R clone2 commit -m '#5: refer existing largefile entity again' |
|
770 | $ hg -R clone2 commit -m '#5: refer existing largefile entity again' | |
771 | Invoking status precommit hook |
|
771 | Invoking status precommit hook | |
772 | M b |
|
772 | M b | |
773 | $ hg -R clone2 summary --large |
|
773 | $ hg -R clone2 summary --large | |
774 | parent: 5:036794ea641c tip |
|
774 | parent: 5:036794ea641c tip | |
775 | #5: refer existing largefile entity again |
|
775 | #5: refer existing largefile entity again | |
776 | branch: default |
|
776 | branch: default | |
777 | commit: (clean) |
|
777 | commit: (clean) | |
778 | update: (current) |
|
778 | update: (current) | |
779 | phases: 6 draft |
|
779 | phases: 6 draft | |
780 | largefiles: 3 entities for 3 files to upload |
|
780 | largefiles: 3 entities for 3 files to upload | |
781 | $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" |
|
781 | $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" | |
782 | comparing with $TESTTMP/issue3651/src |
|
782 | comparing with $TESTTMP/issue3651/src | |
783 | searching for changes |
|
783 | searching for changes | |
784 | 1:1acbe71ce432 |
|
784 | 1:1acbe71ce432 | |
785 | 2:6095d0695d70 |
|
785 | 2:6095d0695d70 | |
786 | 3:7983dce246cc |
|
786 | 3:7983dce246cc | |
787 | 4:233f12ada4ae |
|
787 | 4:233f12ada4ae | |
788 | 5:036794ea641c |
|
788 | 5:036794ea641c | |
789 | largefiles to upload (3 entities): |
|
789 | largefiles to upload (3 entities): | |
790 | b |
|
790 | b | |
791 | b1 |
|
791 | b1 | |
792 | b2 |
|
792 | b2 | |
793 |
|
793 | |||
794 | $ hg -R clone2 cat -r 3 clone2/.hglf/b |
|
794 | $ hg -R clone2 cat -r 3 clone2/.hglf/b | |
795 | c801c9cfe94400963fcb683246217d5db77f9a9a |
|
795 | c801c9cfe94400963fcb683246217d5db77f9a9a | |
796 | $ hg -R clone2 cat -r 4 clone2/.hglf/b |
|
796 | $ hg -R clone2 cat -r 4 clone2/.hglf/b | |
797 | 13f9ed0898e315bf59dc2973fec52037b6f441a2 |
|
797 | 13f9ed0898e315bf59dc2973fec52037b6f441a2 | |
798 | $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug --config progress.debug=true |
|
798 | $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug --config progress.debug=true | |
799 | comparing with $TESTTMP/issue3651/src |
|
799 | comparing with $TESTTMP/issue3651/src | |
800 | query 1; heads |
|
800 | query 1; heads | |
801 | searching for changes |
|
801 | searching for changes | |
802 | all remote heads known locally |
|
802 | all remote heads known locally | |
803 | 1:1acbe71ce432 |
|
803 | 1:1acbe71ce432 | |
804 | 2:6095d0695d70 |
|
804 | 2:6095d0695d70 | |
805 | 3:7983dce246cc |
|
805 | 3:7983dce246cc | |
806 | 4:233f12ada4ae |
|
806 | 4:233f12ada4ae | |
807 | 5:036794ea641c |
|
807 | 5:036794ea641c | |
808 | finding outgoing largefiles: 0/5 revisions (0.00%) |
|
808 | finding outgoing largefiles: 0/5 revisions (0.00%) | |
809 | finding outgoing largefiles: 1/5 revisions (20.00%) |
|
809 | finding outgoing largefiles: 1/5 revisions (20.00%) | |
810 | finding outgoing largefiles: 2/5 revisions (40.00%) |
|
810 | finding outgoing largefiles: 2/5 revisions (40.00%) | |
811 | finding outgoing largefiles: 3/5 revisions (60.00%) |
|
811 | finding outgoing largefiles: 3/5 revisions (60.00%) | |
812 | finding outgoing largefiles: 4/5 revisions (80.00%) |
|
812 | finding outgoing largefiles: 4/5 revisions (80.00%) | |
813 | largefiles to upload (3 entities): |
|
813 | largefiles to upload (3 entities): | |
814 | b |
|
814 | b | |
815 | 13f9ed0898e315bf59dc2973fec52037b6f441a2 |
|
815 | 13f9ed0898e315bf59dc2973fec52037b6f441a2 | |
816 | 89e6c98d92887913cadf06b2adb97f26cde4849b |
|
816 | 89e6c98d92887913cadf06b2adb97f26cde4849b | |
817 | c801c9cfe94400963fcb683246217d5db77f9a9a |
|
817 | c801c9cfe94400963fcb683246217d5db77f9a9a | |
818 | b1 |
|
818 | b1 | |
819 | 89e6c98d92887913cadf06b2adb97f26cde4849b |
|
819 | 89e6c98d92887913cadf06b2adb97f26cde4849b | |
820 | b2 |
|
820 | b2 | |
821 | 89e6c98d92887913cadf06b2adb97f26cde4849b |
|
821 | 89e6c98d92887913cadf06b2adb97f26cde4849b | |
822 |
|
822 | |||
823 |
|
823 | |||
824 | Pushing revision #1 causes uploading entity 89e6c98d9288, which is |
|
824 | Pushing revision #1 causes uploading entity 89e6c98d9288, which is | |
825 | shared also by largefiles b1, b2 in revision #2 and b in revision #5. |
|
825 | shared also by largefiles b1, b2 in revision #2 and b in revision #5. | |
826 |
|
826 | |||
827 | Then, entity 89e6c98d9288 is not treated as an "outgoing entity" by "hg |
|
827 | Then, entity 89e6c98d9288 is not treated as an "outgoing entity" by "hg | |
828 | summary" and "hg outgoing", even though files in outgoing revisions #2 |
|
828 | summary" and "hg outgoing", even though files in outgoing revisions #2 | |
829 | and #5 refer to it. |
|
829 | and #5 refer to it. | |
830 |
|
830 | |||
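The "entities" counted in these messages are the content hashes recorded in the standins: b, b1 and b2 were created with identical contents, so all three point at one entity, and once revision #1 is pushed below the remaining outgoing revisions no longer count it. The hash itself is the SHA-1 of the largefile's contents, which can be checked against the standin printed by 'hg cat -r 1 clone2/.hglf/b' above (a plain-Python illustration, not the extension's code):

    import hashlib

    def entity_hash(data):
        # a standin under .hglf/ stores the SHA-1 hex digest of the largefile's contents
        return hashlib.sha1(data).hexdigest()

    # 'echo b > clone2/b' wrote b'b\n'; b1 and b2 are byte-for-byte copies, so all
    # three files share the single entity 89e6c98d92887913cadf06b2adb97f26cde4849b
    assert entity_hash(b'b\n') == '89e6c98d92887913cadf06b2adb97f26cde4849b'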
831 | $ hg -R clone2 push -r 1 -q |
|
831 | $ hg -R clone2 push -r 1 -q | |
832 | $ hg -R clone2 summary --large |
|
832 | $ hg -R clone2 summary --large | |
833 | parent: 5:036794ea641c tip |
|
833 | parent: 5:036794ea641c tip | |
834 | #5: refer existing largefile entity again |
|
834 | #5: refer existing largefile entity again | |
835 | branch: default |
|
835 | branch: default | |
836 | commit: (clean) |
|
836 | commit: (clean) | |
837 | update: (current) |
|
837 | update: (current) | |
838 | phases: 6 draft |
|
838 | phases: 6 draft | |
839 | largefiles: 2 entities for 1 files to upload |
|
839 | largefiles: 2 entities for 1 files to upload | |
840 | $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" |
|
840 | $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" | |
841 | comparing with $TESTTMP/issue3651/src |
|
841 | comparing with $TESTTMP/issue3651/src | |
842 | searching for changes |
|
842 | searching for changes | |
843 | 2:6095d0695d70 |
|
843 | 2:6095d0695d70 | |
844 | 3:7983dce246cc |
|
844 | 3:7983dce246cc | |
845 | 4:233f12ada4ae |
|
845 | 4:233f12ada4ae | |
846 | 5:036794ea641c |
|
846 | 5:036794ea641c | |
847 | largefiles to upload (2 entities): |
|
847 | largefiles to upload (2 entities): | |
848 | b |
|
848 | b | |
849 |
|
849 | |||
850 | $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug --config progress.debug=true |
|
850 | $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug --config progress.debug=true | |
851 | comparing with $TESTTMP/issue3651/src |
|
851 | comparing with $TESTTMP/issue3651/src | |
852 | query 1; heads |
|
852 | query 1; heads | |
853 | searching for changes |
|
853 | searching for changes | |
854 | all remote heads known locally |
|
854 | all remote heads known locally | |
855 | 2:6095d0695d70 |
|
855 | 2:6095d0695d70 | |
856 | 3:7983dce246cc |
|
856 | 3:7983dce246cc | |
857 | 4:233f12ada4ae |
|
857 | 4:233f12ada4ae | |
858 | 5:036794ea641c |
|
858 | 5:036794ea641c | |
859 | finding outgoing largefiles: 0/4 revisions (0.00%) |
|
859 | finding outgoing largefiles: 0/4 revisions (0.00%) | |
860 | finding outgoing largefiles: 1/4 revisions (25.00%) |
|
860 | finding outgoing largefiles: 1/4 revisions (25.00%) | |
861 | finding outgoing largefiles: 2/4 revisions (50.00%) |
|
861 | finding outgoing largefiles: 2/4 revisions (50.00%) | |
862 | finding outgoing largefiles: 3/4 revisions (75.00%) |
|
862 | finding outgoing largefiles: 3/4 revisions (75.00%) | |
863 | largefiles to upload (2 entities): |
|
863 | largefiles to upload (2 entities): | |
864 | b |
|
864 | b | |
865 | 13f9ed0898e315bf59dc2973fec52037b6f441a2 |
|
865 | 13f9ed0898e315bf59dc2973fec52037b6f441a2 | |
866 | c801c9cfe94400963fcb683246217d5db77f9a9a |
|
866 | c801c9cfe94400963fcb683246217d5db77f9a9a | |
867 |
|
867 | |||
868 |
|
868 | |||
869 | $ cd .. |
|
869 | $ cd .. | |
870 |
|
870 | |||
871 | merge action 'd' for 'local renamed directory to d2/g' which has no filename |
|
871 | merge action 'd' for 'local renamed directory to d2/g' which has no filename | |
872 | ================================================================================== |
|
872 | ================================================================================== | |
873 |
|
873 | |||
874 | $ hg init merge-action |
|
874 | $ hg init merge-action | |
875 | $ cd merge-action |
|
875 | $ cd merge-action | |
876 | $ touch l |
|
876 | $ touch l | |
877 | $ hg add --large l |
|
877 | $ hg add --large l | |
878 | $ mkdir d1 |
|
878 | $ mkdir d1 | |
879 | $ touch d1/f |
|
879 | $ touch d1/f | |
880 | $ hg ci -Aqm0 |
|
880 | $ hg ci -Aqm0 | |
881 | Invoking status precommit hook |
|
881 | Invoking status precommit hook | |
882 | A d1/f |
|
882 | A d1/f | |
883 | A l |
|
883 | A l | |
884 | $ echo > d1/f |
|
884 | $ echo > d1/f | |
885 | $ touch d1/g |
|
885 | $ touch d1/g | |
886 | $ hg ci -Aqm1 |
|
886 | $ hg ci -Aqm1 | |
887 | Invoking status precommit hook |
|
887 | Invoking status precommit hook | |
888 | M d1/f |
|
888 | M d1/f | |
889 | A d1/g |
|
889 | A d1/g | |
890 | $ hg up -qr0 |
|
890 | $ hg up -qr0 | |
891 | $ hg mv d1 d2 |
|
891 | $ hg mv d1 d2 | |
892 | moving d1/f to d2/f |
|
892 | moving d1/f to d2/f | |
893 | $ hg ci -qm2 |
|
893 | $ hg ci -qm2 | |
894 | Invoking status precommit hook |
|
894 | Invoking status precommit hook | |
895 | A d2/f |
|
895 | A d2/f | |
896 | R d1/f |
|
896 | R d1/f | |
897 | $ hg merge |
|
897 | $ hg merge | |
898 | merging d2/f and d1/f to d2/f |
|
898 | merging d2/f and d1/f to d2/f | |
899 | 1 files updated, 1 files merged, 0 files removed, 0 files unresolved |
|
899 | 1 files updated, 1 files merged, 0 files removed, 0 files unresolved | |
900 | (branch merge, don't forget to commit) |
|
900 | (branch merge, don't forget to commit) | |
901 | $ cd .. |
|
901 | $ cd .. | |
902 |
|
902 | |||
903 |
|
903 | |||
904 | Merge conflicts: |
|
904 | Merge conflicts: | |
905 | ===================== |
|
905 | ===================== | |
906 |
|
906 | |||
907 | $ hg init merge |
|
907 | $ hg init merge | |
908 | $ cd merge |
|
908 | $ cd merge | |
909 | $ echo 0 > f-different |
|
909 | $ echo 0 > f-different | |
910 | $ echo 0 > f-same |
|
910 | $ echo 0 > f-same | |
911 | $ echo 0 > f-unchanged-1 |
|
911 | $ echo 0 > f-unchanged-1 | |
912 | $ echo 0 > f-unchanged-2 |
|
912 | $ echo 0 > f-unchanged-2 | |
913 | $ hg add --large * |
|
913 | $ hg add --large * | |
914 | $ hg ci -m0 |
|
914 | $ hg ci -m0 | |
915 | Invoking status precommit hook |
|
915 | Invoking status precommit hook | |
916 | A f-different |
|
916 | A f-different | |
917 | A f-same |
|
917 | A f-same | |
918 | A f-unchanged-1 |
|
918 | A f-unchanged-1 | |
919 | A f-unchanged-2 |
|
919 | A f-unchanged-2 | |
920 | $ echo tmp1 > f-unchanged-1 |
|
920 | $ echo tmp1 > f-unchanged-1 | |
921 | $ echo tmp1 > f-unchanged-2 |
|
921 | $ echo tmp1 > f-unchanged-2 | |
922 | $ echo tmp1 > f-same |
|
922 | $ echo tmp1 > f-same | |
923 | $ hg ci -m1 |
|
923 | $ hg ci -m1 | |
924 | Invoking status precommit hook |
|
924 | Invoking status precommit hook | |
925 | M f-same |
|
925 | M f-same | |
926 | M f-unchanged-1 |
|
926 | M f-unchanged-1 | |
927 | M f-unchanged-2 |
|
927 | M f-unchanged-2 | |
928 | $ echo 2 > f-different |
|
928 | $ echo 2 > f-different | |
929 | $ echo 0 > f-unchanged-1 |
|
929 | $ echo 0 > f-unchanged-1 | |
930 | $ echo 1 > f-unchanged-2 |
|
930 | $ echo 1 > f-unchanged-2 | |
931 | $ echo 1 > f-same |
|
931 | $ echo 1 > f-same | |
932 | $ hg ci -m2 |
|
932 | $ hg ci -m2 | |
933 | Invoking status precommit hook |
|
933 | Invoking status precommit hook | |
934 | M f-different |
|
934 | M f-different | |
935 | M f-same |
|
935 | M f-same | |
936 | M f-unchanged-1 |
|
936 | M f-unchanged-1 | |
937 | M f-unchanged-2 |
|
937 | M f-unchanged-2 | |
938 | $ hg up -qr0 |
|
938 | $ hg up -qr0 | |
939 | $ echo tmp2 > f-unchanged-1 |
|
939 | $ echo tmp2 > f-unchanged-1 | |
940 | $ echo tmp2 > f-unchanged-2 |
|
940 | $ echo tmp2 > f-unchanged-2 | |
941 | $ echo tmp2 > f-same |
|
941 | $ echo tmp2 > f-same | |
942 | $ hg ci -m3 |
|
942 | $ hg ci -m3 | |
943 | Invoking status precommit hook |
|
943 | Invoking status precommit hook | |
944 | M f-same |
|
944 | M f-same | |
945 | M f-unchanged-1 |
|
945 | M f-unchanged-1 | |
946 | M f-unchanged-2 |
|
946 | M f-unchanged-2 | |
947 | created new head |
|
947 | created new head | |
948 | $ echo 1 > f-different |
|
948 | $ echo 1 > f-different | |
949 | $ echo 1 > f-unchanged-1 |
|
949 | $ echo 1 > f-unchanged-1 | |
950 | $ echo 0 > f-unchanged-2 |
|
950 | $ echo 0 > f-unchanged-2 | |
951 | $ echo 1 > f-same |
|
951 | $ echo 1 > f-same | |
952 | $ hg ci -m4 |
|
952 | $ hg ci -m4 | |
953 | Invoking status precommit hook |
|
953 | Invoking status precommit hook | |
954 | M f-different |
|
954 | M f-different | |
955 | M f-same |
|
955 | M f-same | |
956 | M f-unchanged-1 |
|
956 | M f-unchanged-1 | |
957 | M f-unchanged-2 |
|
957 | M f-unchanged-2 | |
958 | $ hg merge |
|
958 | $ hg merge | |
959 | largefile f-different has a merge conflict |
|
959 | largefile f-different has a merge conflict | |
960 | ancestor was 09d2af8dd22201dd8d48e5dcfcaed281ff9422c7 |
|
960 | ancestor was 09d2af8dd22201dd8d48e5dcfcaed281ff9422c7 | |
961 | you can keep (l)ocal e5fa44f2b31c1fb553b6021e7360d07d5d91ff5e or take (o)ther 7448d8798a4380162d4b56f9b452e2f6f9e24e7a. |
|
961 | you can keep (l)ocal e5fa44f2b31c1fb553b6021e7360d07d5d91ff5e or take (o)ther 7448d8798a4380162d4b56f9b452e2f6f9e24e7a. | |
962 | what do you want to do? l |
|
962 | what do you want to do? l | |
963 | getting changed largefiles |
|
963 | getting changed largefiles | |
964 | 1 largefiles updated, 0 removed |
|
964 | 1 largefiles updated, 0 removed | |
965 |
|
|
965 | 1 files updated, 3 files merged, 0 files removed, 0 files unresolved | |
966 | (branch merge, don't forget to commit) |
|
966 | (branch merge, don't forget to commit) | |
967 | $ cat f-different |
|
967 | $ cat f-different | |
968 | 1 |
|
968 | 1 | |
969 | $ cat f-same |
|
969 | $ cat f-same | |
970 | 1 |
|
970 | 1 | |
971 | $ cat f-unchanged-1 |
|
971 | $ cat f-unchanged-1 | |
972 | 1 |
|
972 | 1 | |
973 | $ cat f-unchanged-2 |
|
973 | $ cat f-unchanged-2 | |
974 | 1 |
|
974 | 1 | |
975 | $ cd .. |
|
975 | $ cd .. | |
976 |
|
976 | |||
977 | Test largefile insulation (do not enable as a side effect) |

977 | Test largefile insulation (do not enable as a side effect) | |
978 | ======================================================== |
|
978 | ======================================================== | |
979 |
|
979 | |||
980 | Check whether the "largefiles" feature is supported only in repositories |

980 | Check whether the "largefiles" feature is supported only in repositories | |
981 | enabling the largefiles extension. |

981 | enabling the largefiles extension. | |
982 |
|
982 | |||
983 | $ mkdir individualenabling |
|
983 | $ mkdir individualenabling | |
984 | $ cd individualenabling |
|
984 | $ cd individualenabling | |
985 |
|
985 | |||
986 | $ hg init enabledlocally |
|
986 | $ hg init enabledlocally | |
987 | $ echo large > enabledlocally/large |
|
987 | $ echo large > enabledlocally/large | |
988 | $ hg -R enabledlocally add --large enabledlocally/large |
|
988 | $ hg -R enabledlocally add --large enabledlocally/large | |
989 | $ hg -R enabledlocally commit -m '#0' |
|
989 | $ hg -R enabledlocally commit -m '#0' | |
990 | Invoking status precommit hook |
|
990 | Invoking status precommit hook | |
991 | A large |
|
991 | A large | |
992 |
|
992 | |||
993 | $ hg init notenabledlocally |
|
993 | $ hg init notenabledlocally | |
994 | $ echo large > notenabledlocally/large |
|
994 | $ echo large > notenabledlocally/large | |
995 | $ hg -R notenabledlocally add --large notenabledlocally/large |
|
995 | $ hg -R notenabledlocally add --large notenabledlocally/large | |
996 | $ hg -R notenabledlocally commit -m '#0' |
|
996 | $ hg -R notenabledlocally commit -m '#0' | |
997 | Invoking status precommit hook |
|
997 | Invoking status precommit hook | |
998 | A large |
|
998 | A large | |
999 |
|
999 | |||
1000 | $ cat >> $HGRCPATH <<EOF |
|
1000 | $ cat >> $HGRCPATH <<EOF | |
1001 | > [extensions] |
|
1001 | > [extensions] | |
1002 | > # disable globally |
|
1002 | > # disable globally | |
1003 | > largefiles=! |
|
1003 | > largefiles=! | |
1004 | > EOF |
|
1004 | > EOF | |
1005 | $ cat >> enabledlocally/.hg/hgrc <<EOF |
|
1005 | $ cat >> enabledlocally/.hg/hgrc <<EOF | |
1006 | > [extensions] |
|
1006 | > [extensions] | |
1007 | > # enable locally |
|
1007 | > # enable locally | |
1008 | > largefiles= |
|
1008 | > largefiles= | |
1009 | > EOF |
|
1009 | > EOF | |
1010 | $ hg -R enabledlocally root |
|
1010 | $ hg -R enabledlocally root | |
1011 | $TESTTMP/individualenabling/enabledlocally |
|
1011 | $TESTTMP/individualenabling/enabledlocally | |
1012 | $ hg -R notenabledlocally root |
|
1012 | $ hg -R notenabledlocally root | |
1013 | abort: repository requires features unknown to this Mercurial: largefiles |
|
1013 | abort: repository requires features unknown to this Mercurial: largefiles | |
1014 | (see https://mercurial-scm.org/wiki/MissingRequirement for more information) |
|
1014 | (see https://mercurial-scm.org/wiki/MissingRequirement for more information) | |
1015 | [255] |
|
1015 | [255] | |
1016 |
|
1016 | |||
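The abort above is Mercurial's requirements check at work: once a largefile has been committed, the repository records a `largefiles` entry in its `.hg/requires`, and a client that has not loaded the extension refuses to open such a repository. A minimal way to confirm this, assuming a POSIX shell (the other entries in the file vary by Mercurial version):

  $ grep largefiles notenabledlocally/.hg/requires
  largefiles
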
1017 | $ hg init push-dst |
|
1017 | $ hg init push-dst | |
1018 | $ hg -R enabledlocally push push-dst |
|
1018 | $ hg -R enabledlocally push push-dst | |
1019 | pushing to push-dst |
|
1019 | pushing to push-dst | |
1020 | abort: required features are not supported in the destination: largefiles |
|
1020 | abort: required features are not supported in the destination: largefiles | |
1021 | [255] |
|
1021 | [255] | |
1022 |
|
1022 | |||
1023 | $ hg init pull-src |
|
1023 | $ hg init pull-src | |
1024 | $ hg -R pull-src pull enabledlocally |
|
1024 | $ hg -R pull-src pull enabledlocally | |
1025 | pulling from enabledlocally |
|
1025 | pulling from enabledlocally | |
1026 | abort: required features are not supported in the destination: largefiles |
|
1026 | abort: required features are not supported in the destination: largefiles | |
1027 | [255] |
|
1027 | [255] | |
1028 |
|
1028 | |||
1029 | $ hg clone enabledlocally clone-dst |
|
1029 | $ hg clone enabledlocally clone-dst | |
1030 | abort: repository requires features unknown to this Mercurial: largefiles |
|
1030 | abort: repository requires features unknown to this Mercurial: largefiles | |
1031 | (see https://mercurial-scm.org/wiki/MissingRequirement for more information) |
|
1031 | (see https://mercurial-scm.org/wiki/MissingRequirement for more information) | |
1032 | [255] |
|
1032 | [255] | |
1033 | $ test -d clone-dst |
|
1033 | $ test -d clone-dst | |
1034 | [1] |
|
1034 | [1] | |
1035 | $ hg clone --pull enabledlocally clone-pull-dst |
|
1035 | $ hg clone --pull enabledlocally clone-pull-dst | |
1036 | abort: required features are not supported in the destination: largefiles |
|
1036 | abort: required features are not supported in the destination: largefiles | |
1037 | [255] |
|
1037 | [255] | |
1038 | $ test -d clone-pull-dst |
|
1038 | $ test -d clone-pull-dst | |
1039 | [1] |
|
1039 | [1] | |
1040 |
|
1040 | |||
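When a repository that already uses largefiles has to be consumed by clients that cannot enable the extension, the extension's own `hg lfconvert --to-normal` command can rewrite it into a plain repository. A rough sketch, not part of the test above (the destination name `normalized` is illustrative, and the conversion creates a new repository rather than modifying the source):

  $ hg --config extensions.largefiles= lfconvert --to-normal enabledlocally normalized
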
1041 | #if serve |
|
1041 | #if serve | |
1042 |
|
1042 | |||
1043 | Test largefiles specific peer setup, when largefiles is enabled |
|
1043 | Test largefiles specific peer setup, when largefiles is enabled | |
1044 | locally (issue4109) |
|
1044 | locally (issue4109) | |
1045 |
|
1045 | |||
1046 | $ hg showconfig extensions | grep largefiles |
|
1046 | $ hg showconfig extensions | grep largefiles | |
1047 | extensions.largefiles=! |
|
1047 | extensions.largefiles=! | |
1048 | $ mkdir -p $TESTTMP/individualenabling/usercache |
|
1048 | $ mkdir -p $TESTTMP/individualenabling/usercache | |
1049 |
|
1049 | |||
1050 | $ hg serve -R enabledlocally -d -p $HGPORT --pid-file hg.pid |
|
1050 | $ hg serve -R enabledlocally -d -p $HGPORT --pid-file hg.pid | |
1051 | $ cat hg.pid >> $DAEMON_PIDS |
|
1051 | $ cat hg.pid >> $DAEMON_PIDS | |
1052 |
|
1052 | |||
1053 | $ hg init pull-dst |
|
1053 | $ hg init pull-dst | |
1054 | $ cat > pull-dst/.hg/hgrc <<EOF |
|
1054 | $ cat > pull-dst/.hg/hgrc <<EOF | |
1055 | > [extensions] |
|
1055 | > [extensions] | |
1056 | > # enable locally |
|
1056 | > # enable locally | |
1057 | > largefiles= |
|
1057 | > largefiles= | |
1058 | > [largefiles] |
|
1058 | > [largefiles] | |
1059 | > # ignore system cache to force largefiles specific wire proto access |
|
1059 | > # ignore system cache to force largefiles specific wire proto access | |
1060 | > usercache=$TESTTMP/individualenabling/usercache |
|
1060 | > usercache=$TESTTMP/individualenabling/usercache | |
1061 | > EOF |
|
1061 | > EOF | |
1062 | $ hg -R pull-dst -q pull -u http://localhost:$HGPORT |
|
1062 | $ hg -R pull-dst -q pull -u http://localhost:$HGPORT | |
1063 |
|
1063 | |||
1064 | $ killdaemons.py |
|
1064 | $ killdaemons.py | |
1065 | #endif |
|
1065 | #endif | |
1066 |
|
1066 | |||
1067 | Test overridden functions work correctly even for repos disabling |
|
1067 | Test overridden functions work correctly even for repos disabling | |
1068 | largefiles (issue4547) |
|
1068 | largefiles (issue4547) | |
1069 |
|
1069 | |||
1070 | $ hg showconfig extensions | grep largefiles |
|
1070 | $ hg showconfig extensions | grep largefiles | |
1071 | extensions.largefiles=! |
|
1071 | extensions.largefiles=! | |
1072 |
|
1072 | |||
1073 | (test updating implied by clone) |
|
1073 | (test updating implied by clone) | |
1074 |
|
1074 | |||
1075 | $ hg init enabled-but-no-largefiles |
|
1075 | $ hg init enabled-but-no-largefiles | |
1076 | $ echo normal1 > enabled-but-no-largefiles/normal1 |
|
1076 | $ echo normal1 > enabled-but-no-largefiles/normal1 | |
1077 | $ hg -R enabled-but-no-largefiles add enabled-but-no-largefiles/normal1 |
|
1077 | $ hg -R enabled-but-no-largefiles add enabled-but-no-largefiles/normal1 | |
1078 | $ hg -R enabled-but-no-largefiles commit -m '#0@enabled-but-no-largefiles' |
|
1078 | $ hg -R enabled-but-no-largefiles commit -m '#0@enabled-but-no-largefiles' | |
1079 | Invoking status precommit hook |
|
1079 | Invoking status precommit hook | |
1080 | A normal1 |
|
1080 | A normal1 | |
1081 | $ cat >> enabled-but-no-largefiles/.hg/hgrc <<EOF |
|
1081 | $ cat >> enabled-but-no-largefiles/.hg/hgrc <<EOF | |
1082 | > [extensions] |
|
1082 | > [extensions] | |
1083 | > # enable locally |
|
1083 | > # enable locally | |
1084 | > largefiles= |
|
1084 | > largefiles= | |
1085 | > EOF |
|
1085 | > EOF | |
1086 | $ hg clone -q enabled-but-no-largefiles no-largefiles |
|
1086 | $ hg clone -q enabled-but-no-largefiles no-largefiles | |
1087 |
|
1087 | |||
1088 | $ echo normal2 > enabled-but-no-largefiles/normal2 |
|
1088 | $ echo normal2 > enabled-but-no-largefiles/normal2 | |
1089 | $ hg -R enabled-but-no-largefiles add enabled-but-no-largefiles/normal2 |
|
1089 | $ hg -R enabled-but-no-largefiles add enabled-but-no-largefiles/normal2 | |
1090 | $ hg -R enabled-but-no-largefiles commit -m '#1@enabled-but-no-largefiles' |
|
1090 | $ hg -R enabled-but-no-largefiles commit -m '#1@enabled-but-no-largefiles' | |
1091 | Invoking status precommit hook |
|
1091 | Invoking status precommit hook | |
1092 | A normal2 |
|
1092 | A normal2 | |
1093 |
|
1093 | |||
1094 | $ echo normal3 > no-largefiles/normal3 |
|
1094 | $ echo normal3 > no-largefiles/normal3 | |
1095 | $ hg -R no-largefiles add no-largefiles/normal3 |
|
1095 | $ hg -R no-largefiles add no-largefiles/normal3 | |
1096 | $ hg -R no-largefiles commit -m '#1@no-largefiles' |
|
1096 | $ hg -R no-largefiles commit -m '#1@no-largefiles' | |
1097 | Invoking status precommit hook |
|
1097 | Invoking status precommit hook | |
1098 | A normal3 |
|
1098 | A normal3 | |
1099 |
|
1099 | |||
1100 | $ hg -R no-largefiles -q pull --rebase |
|
1100 | $ hg -R no-largefiles -q pull --rebase | |
1101 | Invoking status precommit hook |
|
1101 | Invoking status precommit hook | |
1102 | A normal3 |
|
1102 | A normal3 | |
1103 |
|
1103 | |||
1104 | (test reverting) |
|
1104 | (test reverting) | |
1105 |
|
1105 | |||
1106 | $ hg init subrepo-root |
|
1106 | $ hg init subrepo-root | |
1107 | $ cat >> subrepo-root/.hg/hgrc <<EOF |
|
1107 | $ cat >> subrepo-root/.hg/hgrc <<EOF | |
1108 | > [extensions] |
|
1108 | > [extensions] | |
1109 | > # enable locally |
|
1109 | > # enable locally | |
1110 | > largefiles= |
|
1110 | > largefiles= | |
1111 | > EOF |
|
1111 | > EOF | |
1112 | $ echo large > subrepo-root/large |
|
1112 | $ echo large > subrepo-root/large | |
1113 | $ mkdir -p subrepo-root/dir/subdir |
|
1113 | $ mkdir -p subrepo-root/dir/subdir | |
1114 | $ echo large2 > subrepo-root/dir/subdir/large.bin |
|
1114 | $ echo large2 > subrepo-root/dir/subdir/large.bin | |
1115 | $ hg -R subrepo-root add --large subrepo-root/large subrepo-root/dir/subdir/large.bin |
|
1115 | $ hg -R subrepo-root add --large subrepo-root/large subrepo-root/dir/subdir/large.bin | |
1116 | $ hg clone -q no-largefiles subrepo-root/no-largefiles |
|
1116 | $ hg clone -q no-largefiles subrepo-root/no-largefiles | |
1117 | $ cat > subrepo-root/.hgsub <<EOF |
|
1117 | $ cat > subrepo-root/.hgsub <<EOF | |
1118 | > no-largefiles = no-largefiles |
|
1118 | > no-largefiles = no-largefiles | |
1119 | > EOF |
|
1119 | > EOF | |
1120 | $ hg -R subrepo-root add subrepo-root/.hgsub |
|
1120 | $ hg -R subrepo-root add subrepo-root/.hgsub | |
1121 | $ hg -R subrepo-root commit -m '#0' |
|
1121 | $ hg -R subrepo-root commit -m '#0' | |
1122 | Invoking status precommit hook |
|
1122 | Invoking status precommit hook | |
1123 | A .hgsub |
|
1123 | A .hgsub | |
1124 | A dir/subdir/large.bin |
|
1124 | A dir/subdir/large.bin | |
1125 | A large |
|
1125 | A large | |
1126 | ? .hgsubstate |
|
1126 | ? .hgsubstate | |
1127 | $ echo dirty >> subrepo-root/large |
|
1127 | $ echo dirty >> subrepo-root/large | |
1128 | $ echo dirty >> subrepo-root/no-largefiles/normal1 |
|
1128 | $ echo dirty >> subrepo-root/no-largefiles/normal1 | |
1129 | $ hg -R subrepo-root status -S |
|
1129 | $ hg -R subrepo-root status -S | |
1130 | M large |
|
1130 | M large | |
1131 | M no-largefiles/normal1 |
|
1131 | M no-largefiles/normal1 | |
1132 | $ hg -R subrepo-root extdiff -p echo -S --config extensions.extdiff= |
|
1132 | $ hg -R subrepo-root extdiff -p echo -S --config extensions.extdiff= | |
1133 | "*\\no-largefiles\\normal1" "*\\no-largefiles\\normal1" (glob) (windows !) |
|
1133 | "*\\no-largefiles\\normal1" "*\\no-largefiles\\normal1" (glob) (windows !) | |
1134 | */no-largefiles/normal1 */no-largefiles/normal1 (glob) (no-windows !) |
|
1134 | */no-largefiles/normal1 */no-largefiles/normal1 (glob) (no-windows !) | |
1135 | [1] |
|
1135 | [1] | |
1136 | $ hg -R subrepo-root revert --all |
|
1136 | $ hg -R subrepo-root revert --all | |
1137 | reverting subrepo-root/.hglf/large |
|
1137 | reverting subrepo-root/.hglf/large | |
1138 | reverting subrepo no-largefiles |
|
1138 | reverting subrepo no-largefiles | |
1139 | reverting subrepo-root/no-largefiles/normal1 |
|
1139 | reverting subrepo-root/no-largefiles/normal1 | |
1140 |
|
1140 | |||
1141 | Move (and then undo) a directory move with only largefiles. |
|
1141 | Move (and then undo) a directory move with only largefiles. | |
1142 |
|
1142 | |||
1143 | $ cd subrepo-root |
|
1143 | $ cd subrepo-root | |
1144 | $ "$PYTHON" $TESTDIR/list-tree.py .hglf dir* large* |
|
1144 | $ "$PYTHON" $TESTDIR/list-tree.py .hglf dir* large* | |
1145 | .hglf/ |
|
1145 | .hglf/ | |
1146 | .hglf/dir/ |
|
1146 | .hglf/dir/ | |
1147 | .hglf/dir/subdir/ |
|
1147 | .hglf/dir/subdir/ | |
1148 | .hglf/dir/subdir/large.bin |
|
1148 | .hglf/dir/subdir/large.bin | |
1149 | .hglf/large |
|
1149 | .hglf/large | |
1150 | dir/ |
|
1150 | dir/ | |
1151 | dir/subdir/ |
|
1151 | dir/subdir/ | |
1152 | dir/subdir/large.bin |
|
1152 | dir/subdir/large.bin | |
1153 | large |
|
1153 | large | |
1154 | large.orig |
|
1154 | large.orig | |
1155 |
|
1155 | |||
1156 | $ hg mv dir/subdir dir/subdir2 |
|
1156 | $ hg mv dir/subdir dir/subdir2 | |
1157 | moving .hglf/dir/subdir/large.bin to .hglf/dir/subdir2/large.bin |
|
1157 | moving .hglf/dir/subdir/large.bin to .hglf/dir/subdir2/large.bin | |
1158 |
|
1158 | |||
1159 | $ "$PYTHON" $TESTDIR/list-tree.py .hglf dir* large* |
|
1159 | $ "$PYTHON" $TESTDIR/list-tree.py .hglf dir* large* | |
1160 | .hglf/ |
|
1160 | .hglf/ | |
1161 | .hglf/dir/ |
|
1161 | .hglf/dir/ | |
1162 | .hglf/dir/subdir2/ |
|
1162 | .hglf/dir/subdir2/ | |
1163 | .hglf/dir/subdir2/large.bin |
|
1163 | .hglf/dir/subdir2/large.bin | |
1164 | .hglf/large |
|
1164 | .hglf/large | |
1165 | dir/ |
|
1165 | dir/ | |
1166 | dir/subdir2/ |
|
1166 | dir/subdir2/ | |
1167 | dir/subdir2/large.bin |
|
1167 | dir/subdir2/large.bin | |
1168 | large |
|
1168 | large | |
1169 | large.orig |
|
1169 | large.orig | |
1170 | $ hg status -C |
|
1170 | $ hg status -C | |
1171 | A dir/subdir2/large.bin |
|
1171 | A dir/subdir2/large.bin | |
1172 | dir/subdir/large.bin |
|
1172 | dir/subdir/large.bin | |
1173 | R dir/subdir/large.bin |
|
1173 | R dir/subdir/large.bin | |
1174 | ? large.orig |
|
1174 | ? large.orig | |
1175 |
|
1175 | |||
1176 | $ echo 'modified' > dir/subdir2/large.bin |
|
1176 | $ echo 'modified' > dir/subdir2/large.bin | |
1177 | $ hg status -C |
|
1177 | $ hg status -C | |
1178 | A dir/subdir2/large.bin |
|
1178 | A dir/subdir2/large.bin | |
1179 | dir/subdir/large.bin |
|
1179 | dir/subdir/large.bin | |
1180 | R dir/subdir/large.bin |
|
1180 | R dir/subdir/large.bin | |
1181 | ? large.orig |
|
1181 | ? large.orig | |
1182 |
|
1182 | |||
1183 | $ hg revert --all |
|
1183 | $ hg revert --all | |
1184 | forgetting .hglf/dir/subdir2/large.bin |
|
1184 | forgetting .hglf/dir/subdir2/large.bin | |
1185 | undeleting .hglf/dir/subdir/large.bin |
|
1185 | undeleting .hglf/dir/subdir/large.bin | |
1186 | reverting subrepo no-largefiles |
|
1186 | reverting subrepo no-largefiles | |
1187 |
|
1187 | |||
1188 | $ hg status -C |
|
1188 | $ hg status -C | |
1189 | ? dir/subdir2/large.bin |
|
1189 | ? dir/subdir2/large.bin | |
1190 | ? large.orig |
|
1190 | ? large.orig | |
1191 |
|
1191 | |||
1192 | The content of the forgotten file shouldn't be clobbered |
|
1192 | The content of the forgotten file shouldn't be clobbered | |
1193 |
|
1193 | |||
1194 | $ cat dir/subdir2/large.bin |
|
1194 | $ cat dir/subdir2/large.bin | |
1195 | modified |
|
1195 | modified | |
1196 |
|
1196 | |||
1197 | The standin for subdir2 should be deleted, not just dropped |
|
1197 | The standin for subdir2 should be deleted, not just dropped | |
1198 |
|
1198 | |||
1199 | $ "$PYTHON" $TESTDIR/list-tree.py .hglf dir* large* |
|
1199 | $ "$PYTHON" $TESTDIR/list-tree.py .hglf dir* large* | |
1200 | .hglf/ |
|
1200 | .hglf/ | |
1201 | .hglf/dir/ |
|
1201 | .hglf/dir/ | |
1202 | .hglf/dir/subdir/ |
|
1202 | .hglf/dir/subdir/ | |
1203 | .hglf/dir/subdir/large.bin |
|
1203 | .hglf/dir/subdir/large.bin | |
1204 | .hglf/large |
|
1204 | .hglf/large | |
1205 | dir/ |
|
1205 | dir/ | |
1206 | dir/subdir/ |
|
1206 | dir/subdir/ | |
1207 | dir/subdir/large.bin |
|
1207 | dir/subdir/large.bin | |
1208 | dir/subdir2/ |
|
1208 | dir/subdir2/ | |
1209 | dir/subdir2/large.bin |
|
1209 | dir/subdir2/large.bin | |
1210 | large |
|
1210 | large | |
1211 | large.orig |
|
1211 | large.orig | |
1212 |
|
1212 | |||
1213 | $ rm -r dir/subdir2 |
|
1213 | $ rm -r dir/subdir2 | |
1214 |
|
1214 | |||
1215 | 'subdir' should not be in the destination. It would be if the subdir2 directory |
|
1215 | 'subdir' should not be in the destination. It would be if the subdir2 directory | |
1216 | existed under .hglf/. |
|
1216 | existed under .hglf/. | |
1217 | $ hg mv dir/subdir dir/subdir2 |
|
1217 | $ hg mv dir/subdir dir/subdir2 | |
1218 | moving .hglf/dir/subdir/large.bin to .hglf/dir/subdir2/large.bin |
|
1218 | moving .hglf/dir/subdir/large.bin to .hglf/dir/subdir2/large.bin | |
1219 |
|
1219 | |||
1220 | $ hg status -C |
|
1220 | $ hg status -C | |
1221 | A dir/subdir2/large.bin |
|
1221 | A dir/subdir2/large.bin | |
1222 | dir/subdir/large.bin |
|
1222 | dir/subdir/large.bin | |
1223 | R dir/subdir/large.bin |
|
1223 | R dir/subdir/large.bin | |
1224 | ? large.orig |
|
1224 | ? large.orig | |
1225 |
|
1225 | |||
1226 | $ "$PYTHON" $TESTDIR/list-tree.py .hglf dir* large* |
|
1226 | $ "$PYTHON" $TESTDIR/list-tree.py .hglf dir* large* | |
1227 | .hglf/ |
|
1227 | .hglf/ | |
1228 | .hglf/dir/ |
|
1228 | .hglf/dir/ | |
1229 | .hglf/dir/subdir2/ |
|
1229 | .hglf/dir/subdir2/ | |
1230 | .hglf/dir/subdir2/large.bin |
|
1230 | .hglf/dir/subdir2/large.bin | |
1231 | .hglf/large |
|
1231 | .hglf/large | |
1232 | dir/ |
|
1232 | dir/ | |
1233 | dir/subdir2/ |
|
1233 | dir/subdir2/ | |
1234 | dir/subdir2/large.bin |
|
1234 | dir/subdir2/large.bin | |
1235 | large |
|
1235 | large | |
1236 | large.orig |
|
1236 | large.orig | |
1237 |
|
1237 | |||
1238 | Start from scratch, and rename something other than the final path component. |
|
1238 | Start from scratch, and rename something other than the final path component. | |
1239 |
|
1239 | |||
1240 | $ hg up -qC . |
|
1240 | $ hg up -qC . | |
1241 | $ hg --config extensions.purge= purge |
|
1241 | $ hg --config extensions.purge= purge | |
1242 |
|
1242 | |||
1243 | $ hg mv dir/subdir dir2/subdir |
|
1243 | $ hg mv dir/subdir dir2/subdir | |
1244 | moving .hglf/dir/subdir/large.bin to .hglf/dir2/subdir/large.bin |
|
1244 | moving .hglf/dir/subdir/large.bin to .hglf/dir2/subdir/large.bin | |
1245 |
|
1245 | |||
1246 | $ hg status -C |
|
1246 | $ hg status -C | |
1247 | A dir2/subdir/large.bin |
|
1247 | A dir2/subdir/large.bin | |
1248 | dir/subdir/large.bin |
|
1248 | dir/subdir/large.bin | |
1249 | R dir/subdir/large.bin |
|
1249 | R dir/subdir/large.bin | |
1250 |
|
1250 | |||
1251 | $ "$PYTHON" $TESTDIR/list-tree.py .hglf dir* large* |
|
1251 | $ "$PYTHON" $TESTDIR/list-tree.py .hglf dir* large* | |
1252 | .hglf/ |
|
1252 | .hglf/ | |
1253 | .hglf/dir2/ |
|
1253 | .hglf/dir2/ | |
1254 | .hglf/dir2/subdir/ |
|
1254 | .hglf/dir2/subdir/ | |
1255 | .hglf/dir2/subdir/large.bin |
|
1255 | .hglf/dir2/subdir/large.bin | |
1256 | .hglf/large |
|
1256 | .hglf/large | |
1257 | dir2/ |
|
1257 | dir2/ | |
1258 | dir2/subdir/ |
|
1258 | dir2/subdir/ | |
1259 | dir2/subdir/large.bin |
|
1259 | dir2/subdir/large.bin | |
1260 | large |
|
1260 | large | |
1261 |
|
1261 | |||
1262 | $ hg revert --all |
|
1262 | $ hg revert --all | |
1263 | forgetting .hglf/dir2/subdir/large.bin |
|
1263 | forgetting .hglf/dir2/subdir/large.bin | |
1264 | undeleting .hglf/dir/subdir/large.bin |
|
1264 | undeleting .hglf/dir/subdir/large.bin | |
1265 | reverting subrepo no-largefiles |
|
1265 | reverting subrepo no-largefiles | |
1266 |
|
1266 | |||
1267 | $ hg status -C |
|
1267 | $ hg status -C | |
1268 | ? dir2/subdir/large.bin |
|
1268 | ? dir2/subdir/large.bin | |
1269 |
|
1269 | |||
1270 | $ "$PYTHON" $TESTDIR/list-tree.py .hglf dir* large* |
|
1270 | $ "$PYTHON" $TESTDIR/list-tree.py .hglf dir* large* | |
1271 | .hglf/ |
|
1271 | .hglf/ | |
1272 | .hglf/dir/ |
|
1272 | .hglf/dir/ | |
1273 | .hglf/dir/subdir/ |
|
1273 | .hglf/dir/subdir/ | |
1274 | .hglf/dir/subdir/large.bin |
|
1274 | .hglf/dir/subdir/large.bin | |
1275 | .hglf/large |
|
1275 | .hglf/large | |
1276 | dir/ |
|
1276 | dir/ | |
1277 | dir/subdir/ |
|
1277 | dir/subdir/ | |
1278 | dir/subdir/large.bin |
|
1278 | dir/subdir/large.bin | |
1279 | dir2/ |
|
1279 | dir2/ | |
1280 | dir2/subdir/ |
|
1280 | dir2/subdir/ | |
1281 | dir2/subdir/large.bin |
|
1281 | dir2/subdir/large.bin | |
1282 | large |
|
1282 | large | |
1283 |
|
1283 | |||
1284 | $ cd ../.. |
|
1284 | $ cd ../.. | |
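The `.hglf/...` paths in the `hg mv` and `hg revert` output above are the standins: small versioned files that mirror each largefile's path and hold the SHA-1 hash of its content, so rename and revert bookkeeping happens on the standins while the large contents themselves live in the largefiles store rather than in the normal revlogs. To inspect the mapping for a given largefile (the command prints the 40-character content hash; output omitted here):

  $ cat .hglf/large
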
1285 |
|
1285 | |||
1286 | Test "pull --rebase" when rebase is enabled before largefiles (issue3861) |
|
1286 | Test "pull --rebase" when rebase is enabled before largefiles (issue3861) | |
1287 | ========================================================================= |
|
1287 | ========================================================================= | |
1288 |
|
1288 | |||
1289 | $ hg showconfig extensions | grep largefiles |
|
1289 | $ hg showconfig extensions | grep largefiles | |
1290 | extensions.largefiles=! |
|
1290 | extensions.largefiles=! | |
1291 |
|
1291 | |||
1292 | $ mkdir issue3861 |
|
1292 | $ mkdir issue3861 | |
1293 | $ cd issue3861 |
|
1293 | $ cd issue3861 | |
1294 | $ hg init src |
|
1294 | $ hg init src | |
1295 | $ hg clone -q src dst |
|
1295 | $ hg clone -q src dst | |
1296 | $ echo a > src/a |
|
1296 | $ echo a > src/a | |
1297 | $ hg -R src commit -Aqm "#0" |
|
1297 | $ hg -R src commit -Aqm "#0" | |
1298 | Invoking status precommit hook |
|
1298 | Invoking status precommit hook | |
1299 | A a |
|
1299 | A a | |
1300 |
|
1300 | |||
1301 | $ cat >> dst/.hg/hgrc <<EOF |
|
1301 | $ cat >> dst/.hg/hgrc <<EOF | |
1302 | > [extensions] |
|
1302 | > [extensions] | |
1303 | > largefiles= |
|
1303 | > largefiles= | |
1304 | > EOF |
|
1304 | > EOF | |
1305 | $ hg -R dst pull --rebase |
|
1305 | $ hg -R dst pull --rebase | |
1306 | pulling from $TESTTMP/issue3861/src |
|
1306 | pulling from $TESTTMP/issue3861/src | |
1307 | requesting all changes |
|
1307 | requesting all changes | |
1308 | adding changesets |
|
1308 | adding changesets | |
1309 | adding manifests |
|
1309 | adding manifests | |
1310 | adding file changes |
|
1310 | adding file changes | |
1311 | added 1 changesets with 1 changes to 1 files |
|
1311 | added 1 changesets with 1 changes to 1 files | |
1312 | new changesets bf5e395ced2c (1 drafts) |
|
1312 | new changesets bf5e395ced2c (1 drafts) | |
1313 | nothing to rebase - updating instead |
|
1313 | nothing to rebase - updating instead | |
1314 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
1314 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
1315 |
|
1315 | |||
1316 | $ cd .. |
|
1316 | $ cd .. |
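What issue3861 exercises is load order: rebase is already enabled elsewhere in the test setup (not shown in this excerpt), while largefiles is only switched on in the destination's `.hg/hgrc`, so the largefiles wrapping of `pull --rebase` must still take effect even though rebase was set up first. Roughly, the configuration under test amounts to this sketch (paths and placement are illustrative, not the literal test harness setup):

  # user-level hgrc, loaded first
  [extensions]
  rebase =

  # dst/.hg/hgrc, loaded per repository
  [extensions]
  largefiles =
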