merge: add a config option to disable path conflict checking...
Siddharth Agarwal
r34942:37450a12 stable
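
This changeset registers a new knob, experimental.merge.checkpathconflicts, with a default of True (the lines marked '+' in the configitems.py hunk below). As a rough, hypothetical sketch of how such a registered item is consumed (the helper name here is illustrative, not the actual merge.py code from this changeset), merge code can query it through the ui, which falls back to the default declared in configitems.py:

# Hypothetical illustration: the section, name and default come from the
# registration below; this helper is not part of the changeset itself.
def _checkpathconflicts(repo):
    # configbool() falls back to the declared default (True), so the check
    # stays enabled unless it is turned off, for example with
    # --config experimental.merge.checkpathconflicts=False
    return repo.ui.configbool('experimental', 'merge.checkpathconflicts')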
configitems.py
@@ -1,1143 +1,1146 @@
# configitems.py - centralized declaration of configuration option
#
# Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import functools
import re

from . import (
    encoding,
    error,
)

def loadconfigtable(ui, extname, configtable):
    """update config item known to the ui with the extension ones"""
    for section, items in configtable.items():
        knownitems = ui._knownconfig.setdefault(section, itemregister())
        knownkeys = set(knownitems)
        newkeys = set(items)
        for key in sorted(knownkeys & newkeys):
            msg = "extension '%s' overwrite config item '%s.%s'"
            msg %= (extname, section, key)
            ui.develwarn(msg, config='warn-config')

        knownitems.update(items)

class configitem(object):
    """represent a known config item

    :section: the official config section where to find this item,
    :name: the official name within the section,
    :default: default value for this item,
    :alias: optional list of tuples as alternatives,
    :generic: this is a generic definition, match name using regular expression.
    """

    def __init__(self, section, name, default=None, alias=(),
                 generic=False, priority=0):
        self.section = section
        self.name = name
        self.default = default
        self.alias = list(alias)
        self.generic = generic
        self.priority = priority
        self._re = None
        if generic:
            self._re = re.compile(self.name)

class itemregister(dict):
    """A specialized dictionary that can handle wild-card selection"""

    def __init__(self):
        super(itemregister, self).__init__()
        self._generics = set()

    def update(self, other):
        super(itemregister, self).update(other)
        self._generics.update(other._generics)

    def __setitem__(self, key, item):
        super(itemregister, self).__setitem__(key, item)
        if item.generic:
            self._generics.add(item)

    def get(self, key):
        baseitem = super(itemregister, self).get(key)
        if baseitem is not None and not baseitem.generic:
            return baseitem

        # search for a matching generic item
        generics = sorted(self._generics, key=(lambda x: (x.priority, x.name)))
        for item in generics:
            # we use 'match' instead of 'search' to make the matching simpler
            # for people unfamiliar with regular expression. Having the match
            # rooted to the start of the string will produce less surprising
            # result for user writing simple regex for sub-attribute.
            #
            # For example using "color\..*" match produces an unsurprising
            # result, while using search could suddenly match apparently
            # unrelated configuration that happens to contains "color."
            # anywhere. This is a tradeoff where we favor requiring ".*" on
            # some match to avoid the need to prefix most pattern with "^".
            # The "^" seems more error prone.
            if item._re.match(key):
                return item

        return None

coreitems = {}

def _register(configtable, *args, **kwargs):
    item = configitem(*args, **kwargs)
    section = configtable.setdefault(item.section, itemregister())
    if item.name in section:
        msg = "duplicated config item registration for '%s.%s'"
        raise error.ProgrammingError(msg % (item.section, item.name))
    section[item.name] = item

# special value for case where the default is derived from other values
dynamicdefault = object()

# Registering actual config items

def getitemregister(configtable):
    f = functools.partial(_register, configtable)
    # export pseudo enum as configitem.*
    f.dynamicdefault = dynamicdefault
    return f

coreconfigitem = getitemregister(coreitems)

coreconfigitem('alias', '.*',
    default=None,
    generic=True,
)
coreconfigitem('annotate', 'nodates',
    default=False,
)
coreconfigitem('annotate', 'showfunc',
    default=False,
)
coreconfigitem('annotate', 'unified',
    default=None,
)
coreconfigitem('annotate', 'git',
    default=False,
)
coreconfigitem('annotate', 'ignorews',
    default=False,
)
coreconfigitem('annotate', 'ignorewsamount',
    default=False,
)
coreconfigitem('annotate', 'ignoreblanklines',
    default=False,
)
coreconfigitem('annotate', 'ignorewseol',
    default=False,
)
coreconfigitem('annotate', 'nobinary',
    default=False,
)
coreconfigitem('annotate', 'noprefix',
    default=False,
)
coreconfigitem('auth', 'cookiefile',
    default=None,
)
# bookmarks.pushing: internal hack for discovery
coreconfigitem('bookmarks', 'pushing',
    default=list,
)
# bundle.mainreporoot: internal hack for bundlerepo
coreconfigitem('bundle', 'mainreporoot',
    default='',
)
# bundle.reorder: experimental config
coreconfigitem('bundle', 'reorder',
    default='auto',
)
coreconfigitem('censor', 'policy',
    default='abort',
)
coreconfigitem('chgserver', 'idletimeout',
    default=3600,
)
coreconfigitem('chgserver', 'skiphash',
    default=False,
)
coreconfigitem('cmdserver', 'log',
    default=None,
)
coreconfigitem('color', '.*',
    default=None,
    generic=True,
)
coreconfigitem('color', 'mode',
    default='auto',
)
coreconfigitem('color', 'pagermode',
    default=dynamicdefault,
)
coreconfigitem('commands', 'show.aliasprefix',
    default=list,
)
coreconfigitem('commands', 'status.relative',
    default=False,
)
coreconfigitem('commands', 'status.skipstates',
    default=[],
)
coreconfigitem('commands', 'status.verbose',
    default=False,
)
coreconfigitem('commands', 'update.check',
    default=None,
    # Deprecated, remove after 4.4 release
    alias=[('experimental', 'updatecheck')]
)
coreconfigitem('commands', 'update.requiredest',
    default=False,
)
coreconfigitem('committemplate', '.*',
    default=None,
    generic=True,
)
coreconfigitem('debug', 'dirstate.delaywrite',
    default=0,
)
coreconfigitem('defaults', '.*',
    default=None,
    generic=True,
)
coreconfigitem('devel', 'all-warnings',
    default=False,
)
coreconfigitem('devel', 'bundle2.debug',
    default=False,
)
coreconfigitem('devel', 'cache-vfs',
    default=None,
)
coreconfigitem('devel', 'check-locks',
    default=False,
)
coreconfigitem('devel', 'check-relroot',
    default=False,
)
coreconfigitem('devel', 'default-date',
    default=None,
)
coreconfigitem('devel', 'deprec-warn',
    default=False,
)
coreconfigitem('devel', 'disableloaddefaultcerts',
    default=False,
)
coreconfigitem('devel', 'warn-empty-changegroup',
    default=False,
)
coreconfigitem('devel', 'legacy.exchange',
    default=list,
)
coreconfigitem('devel', 'servercafile',
    default='',
)
coreconfigitem('devel', 'serverexactprotocol',
    default='',
)
coreconfigitem('devel', 'serverrequirecert',
    default=False,
)
coreconfigitem('devel', 'strip-obsmarkers',
    default=True,
)
coreconfigitem('devel', 'warn-config',
    default=None,
)
coreconfigitem('devel', 'warn-config-default',
    default=None,
)
coreconfigitem('devel', 'user.obsmarker',
    default=None,
)
coreconfigitem('devel', 'warn-config-unknown',
    default=None,
)
coreconfigitem('diff', 'nodates',
    default=False,
)
coreconfigitem('diff', 'showfunc',
    default=False,
)
coreconfigitem('diff', 'unified',
    default=None,
)
coreconfigitem('diff', 'git',
    default=False,
)
coreconfigitem('diff', 'ignorews',
    default=False,
)
coreconfigitem('diff', 'ignorewsamount',
    default=False,
)
coreconfigitem('diff', 'ignoreblanklines',
    default=False,
)
coreconfigitem('diff', 'ignorewseol',
    default=False,
)
coreconfigitem('diff', 'nobinary',
    default=False,
)
coreconfigitem('diff', 'noprefix',
    default=False,
)
coreconfigitem('email', 'bcc',
    default=None,
)
coreconfigitem('email', 'cc',
    default=None,
)
coreconfigitem('email', 'charsets',
    default=list,
)
coreconfigitem('email', 'from',
    default=None,
)
coreconfigitem('email', 'method',
    default='smtp',
)
coreconfigitem('email', 'reply-to',
    default=None,
)
coreconfigitem('email', 'to',
    default=None,
)
coreconfigitem('experimental', 'archivemetatemplate',
    default=dynamicdefault,
)
coreconfigitem('experimental', 'bundle-phases',
    default=False,
)
coreconfigitem('experimental', 'bundle2-advertise',
    default=True,
)
coreconfigitem('experimental', 'bundle2-output-capture',
    default=False,
)
coreconfigitem('experimental', 'bundle2.pushback',
    default=False,
)
coreconfigitem('experimental', 'bundle2lazylocking',
    default=False,
)
coreconfigitem('experimental', 'bundlecomplevel',
    default=None,
)
coreconfigitem('experimental', 'changegroup3',
    default=False,
)
coreconfigitem('experimental', 'clientcompressionengines',
    default=list,
)
coreconfigitem('experimental', 'copytrace',
    default='on',
)
coreconfigitem('experimental', 'copytrace.movecandidateslimit',
    default=100,
)
coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
    default=100,
)
coreconfigitem('experimental', 'crecordtest',
    default=None,
)
coreconfigitem('experimental', 'editortmpinhg',
    default=False,
)
coreconfigitem('experimental', 'evolution',
    default=list,
)
coreconfigitem('experimental', 'evolution.allowdivergence',
    default=False,
    alias=[('experimental', 'allowdivergence')]
)
coreconfigitem('experimental', 'evolution.allowunstable',
    default=None,
)
coreconfigitem('experimental', 'evolution.createmarkers',
    default=None,
)
coreconfigitem('experimental', 'evolution.effect-flags',
    default=False,
    alias=[('experimental', 'effect-flags')]
)
coreconfigitem('experimental', 'evolution.exchange',
    default=None,
)
coreconfigitem('experimental', 'evolution.bundle-obsmarker',
    default=False,
)
coreconfigitem('experimental', 'evolution.track-operation',
    default=True,
)
coreconfigitem('experimental', 'maxdeltachainspan',
    default=-1,
)
coreconfigitem('experimental', 'mmapindexthreshold',
    default=None,
)
coreconfigitem('experimental', 'nonnormalparanoidcheck',
    default=False,
)
coreconfigitem('experimental', 'exportableenviron',
    default=list,
)
coreconfigitem('experimental', 'extendedheader.index',
    default=None,
)
coreconfigitem('experimental', 'extendedheader.similarity',
    default=False,
)
coreconfigitem('experimental', 'format.compression',
    default='zlib',
)
coreconfigitem('experimental', 'graphshorten',
    default=False,
)
coreconfigitem('experimental', 'graphstyle.parent',
    default=dynamicdefault,
)
coreconfigitem('experimental', 'graphstyle.missing',
    default=dynamicdefault,
)
coreconfigitem('experimental', 'graphstyle.grandparent',
    default=dynamicdefault,
)
coreconfigitem('experimental', 'hook-track-tags',
    default=False,
)
coreconfigitem('experimental', 'httppostargs',
    default=False,
)
coreconfigitem('experimental', 'manifestv2',
    default=False,
)
coreconfigitem('experimental', 'mergedriver',
    default=None,
)
coreconfigitem('experimental', 'obsmarkers-exchange-debug',
    default=False,
)
coreconfigitem('experimental', 'rebase.multidest',
    default=False,
)
coreconfigitem('experimental', 'revertalternateinteractivemode',
    default=True,
)
coreconfigitem('experimental', 'revlogv2',
    default=None,
)
coreconfigitem('experimental', 'spacemovesdown',
    default=False,
)
coreconfigitem('experimental', 'sparse-read',
    default=False,
)
coreconfigitem('experimental', 'sparse-read.density-threshold',
    default=0.25,
)
coreconfigitem('experimental', 'sparse-read.min-gap-size',
    default='256K',
)
coreconfigitem('experimental', 'treemanifest',
    default=False,
)
coreconfigitem('extensions', '.*',
    default=None,
    generic=True,
)
coreconfigitem('extdata', '.*',
    default=None,
    generic=True,
)
coreconfigitem('format', 'aggressivemergedeltas',
    default=False,
)
coreconfigitem('format', 'chunkcachesize',
    default=None,
)
coreconfigitem('format', 'dotencode',
    default=True,
)
coreconfigitem('format', 'generaldelta',
    default=False,
)
coreconfigitem('format', 'manifestcachesize',
    default=None,
)
coreconfigitem('format', 'maxchainlen',
    default=None,
)
coreconfigitem('format', 'obsstore-version',
    default=None,
)
coreconfigitem('format', 'usefncache',
    default=True,
)
coreconfigitem('format', 'usegeneraldelta',
    default=True,
)
coreconfigitem('format', 'usestore',
    default=True,
)
coreconfigitem('fsmonitor', 'warn_when_unused',
    default=True,
)
coreconfigitem('fsmonitor', 'warn_update_file_count',
    default=50000,
)
coreconfigitem('hooks', '.*',
    default=dynamicdefault,
    generic=True,
)
coreconfigitem('hgweb-paths', '.*',
    default=list,
    generic=True,
)
coreconfigitem('hostfingerprints', '.*',
    default=list,
    generic=True,
)
coreconfigitem('hostsecurity', 'ciphers',
    default=None,
)
coreconfigitem('hostsecurity', 'disabletls10warning',
    default=False,
)
coreconfigitem('hostsecurity', 'minimumprotocol',
    default=dynamicdefault,
)
coreconfigitem('hostsecurity', '.*:minimumprotocol$',
    default=dynamicdefault,
    generic=True,
)
coreconfigitem('hostsecurity', '.*:ciphers$',
    default=dynamicdefault,
    generic=True,
)
coreconfigitem('hostsecurity', '.*:fingerprints$',
    default=list,
    generic=True,
)
coreconfigitem('hostsecurity', '.*:verifycertsfile$',
    default=None,
    generic=True,
)

coreconfigitem('http_proxy', 'always',
    default=False,
)
coreconfigitem('http_proxy', 'host',
    default=None,
)
coreconfigitem('http_proxy', 'no',
    default=list,
)
coreconfigitem('http_proxy', 'passwd',
    default=None,
)
coreconfigitem('http_proxy', 'user',
    default=None,
)
coreconfigitem('logtoprocess', 'commandexception',
    default=None,
)
coreconfigitem('logtoprocess', 'commandfinish',
    default=None,
)
coreconfigitem('logtoprocess', 'command',
    default=None,
)
coreconfigitem('logtoprocess', 'develwarn',
    default=None,
)
coreconfigitem('logtoprocess', 'uiblocked',
    default=None,
)
coreconfigitem('merge', 'checkunknown',
    default='abort',
)
coreconfigitem('merge', 'checkignored',
    default='abort',
)
+coreconfigitem('experimental', 'merge.checkpathconflicts',
+    default=True,
+)
coreconfigitem('merge', 'followcopies',
    default=True,
)
coreconfigitem('merge', 'on-failure',
    default='continue',
)
coreconfigitem('merge', 'preferancestor',
    default=lambda: ['*'],
)
coreconfigitem('merge-tools', '.*',
    default=None,
    generic=True,
)
coreconfigitem('merge-tools', br'.*\.args$',
    default="$local $base $other",
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.binary$',
    default=False,
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.check$',
    default=list,
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.checkchanged$',
    default=False,
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.executable$',
    default=dynamicdefault,
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.fixeol$',
    default=False,
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.gui$',
    default=False,
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.priority$',
    default=0,
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.premerge$',
    default=dynamicdefault,
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.symlink$',
    default=False,
    generic=True,
    priority=-1,
)
coreconfigitem('pager', 'attend-.*',
    default=dynamicdefault,
    generic=True,
)
coreconfigitem('pager', 'ignore',
    default=list,
)
coreconfigitem('pager', 'pager',
    default=dynamicdefault,
)
coreconfigitem('patch', 'eol',
    default='strict',
)
coreconfigitem('patch', 'fuzz',
    default=2,
)
coreconfigitem('paths', 'default',
    default=None,
)
coreconfigitem('paths', 'default-push',
    default=None,
)
coreconfigitem('paths', '.*',
    default=None,
    generic=True,
)
coreconfigitem('phases', 'checksubrepos',
    default='follow',
)
coreconfigitem('phases', 'new-commit',
    default='draft',
)
coreconfigitem('phases', 'publish',
    default=True,
)
coreconfigitem('profiling', 'enabled',
    default=False,
)
coreconfigitem('profiling', 'format',
    default='text',
)
coreconfigitem('profiling', 'freq',
    default=1000,
)
coreconfigitem('profiling', 'limit',
    default=30,
)
coreconfigitem('profiling', 'nested',
    default=0,
)
coreconfigitem('profiling', 'output',
    default=None,
)
coreconfigitem('profiling', 'showmax',
    default=0.999,
)
coreconfigitem('profiling', 'showmin',
    default=dynamicdefault,
)
coreconfigitem('profiling', 'sort',
    default='inlinetime',
)
coreconfigitem('profiling', 'statformat',
    default='hotpath',
)
coreconfigitem('profiling', 'type',
    default='stat',
)
coreconfigitem('progress', 'assume-tty',
    default=False,
)
coreconfigitem('progress', 'changedelay',
    default=1,
)
coreconfigitem('progress', 'clear-complete',
    default=True,
)
coreconfigitem('progress', 'debug',
    default=False,
)
coreconfigitem('progress', 'delay',
    default=3,
)
coreconfigitem('progress', 'disable',
    default=False,
)
coreconfigitem('progress', 'estimateinterval',
    default=60.0,
)
coreconfigitem('progress', 'format',
    default=lambda: ['topic', 'bar', 'number', 'estimate'],
)
coreconfigitem('progress', 'refresh',
    default=0.1,
)
coreconfigitem('progress', 'width',
    default=dynamicdefault,
)
coreconfigitem('push', 'pushvars.server',
    default=False,
)
coreconfigitem('server', 'bundle1',
    default=True,
)
coreconfigitem('server', 'bundle1gd',
    default=None,
)
coreconfigitem('server', 'bundle1.pull',
    default=None,
)
coreconfigitem('server', 'bundle1gd.pull',
    default=None,
)
coreconfigitem('server', 'bundle1.push',
    default=None,
)
coreconfigitem('server', 'bundle1gd.push',
    default=None,
)
coreconfigitem('server', 'compressionengines',
    default=list,
)
coreconfigitem('server', 'concurrent-push-mode',
    default='strict',
)
coreconfigitem('server', 'disablefullbundle',
    default=False,
)
coreconfigitem('server', 'maxhttpheaderlen',
    default=1024,
)
coreconfigitem('server', 'preferuncompressed',
    default=False,
)
coreconfigitem('server', 'uncompressed',
    default=True,
)
coreconfigitem('server', 'uncompressedallowsecret',
    default=False,
)
coreconfigitem('server', 'validate',
    default=False,
)
coreconfigitem('server', 'zliblevel',
    default=-1,
)
coreconfigitem('smtp', 'host',
    default=None,
)
coreconfigitem('smtp', 'local_hostname',
    default=None,
)
coreconfigitem('smtp', 'password',
    default=None,
)
coreconfigitem('smtp', 'port',
    default=dynamicdefault,
)
coreconfigitem('smtp', 'tls',
    default='none',
)
coreconfigitem('smtp', 'username',
    default=None,
)
coreconfigitem('sparse', 'missingwarning',
    default=True,
)
coreconfigitem('templates', '.*',
    default=None,
    generic=True,
)
coreconfigitem('trusted', 'groups',
    default=list,
)
coreconfigitem('trusted', 'users',
    default=list,
)
coreconfigitem('ui', '_usedassubrepo',
    default=False,
)
coreconfigitem('ui', 'allowemptycommit',
    default=False,
)
coreconfigitem('ui', 'archivemeta',
    default=True,
)
coreconfigitem('ui', 'askusername',
    default=False,
)
coreconfigitem('ui', 'clonebundlefallback',
    default=False,
)
coreconfigitem('ui', 'clonebundleprefers',
    default=list,
)
coreconfigitem('ui', 'clonebundles',
    default=True,
)
coreconfigitem('ui', 'color',
    default='auto',
)
coreconfigitem('ui', 'commitsubrepos',
    default=False,
)
coreconfigitem('ui', 'debug',
    default=False,
)
coreconfigitem('ui', 'debugger',
    default=None,
)
coreconfigitem('ui', 'editor',
    default=dynamicdefault,
)
coreconfigitem('ui', 'fallbackencoding',
    default=None,
)
coreconfigitem('ui', 'forcecwd',
    default=None,
)
coreconfigitem('ui', 'forcemerge',
    default=None,
)
coreconfigitem('ui', 'formatdebug',
    default=False,
)
coreconfigitem('ui', 'formatjson',
    default=False,
)
coreconfigitem('ui', 'formatted',
    default=None,
)
coreconfigitem('ui', 'graphnodetemplate',
    default=None,
)
coreconfigitem('ui', 'http2debuglevel',
    default=None,
)
coreconfigitem('ui', 'interactive',
    default=None,
)
coreconfigitem('ui', 'interface',
    default=None,
)
coreconfigitem('ui', 'interface.chunkselector',
    default=None,
)
coreconfigitem('ui', 'logblockedtimes',
    default=False,
)
coreconfigitem('ui', 'logtemplate',
    default=None,
)
coreconfigitem('ui', 'merge',
    default=None,
)
coreconfigitem('ui', 'mergemarkers',
    default='basic',
)
coreconfigitem('ui', 'mergemarkertemplate',
    default=('{node|short} '
             '{ifeq(tags, "tip", "", '
             'ifeq(tags, "", "", "{tags} "))}'
             '{if(bookmarks, "{bookmarks} ")}'
             '{ifeq(branch, "default", "", "{branch} ")}'
             '- {author|user}: {desc|firstline}')
)
coreconfigitem('ui', 'nontty',
    default=False,
)
coreconfigitem('ui', 'origbackuppath',
    default=None,
)
coreconfigitem('ui', 'paginate',
    default=True,
)
coreconfigitem('ui', 'patch',
    default=None,
)
coreconfigitem('ui', 'portablefilenames',
    default='warn',
)
coreconfigitem('ui', 'promptecho',
    default=False,
)
coreconfigitem('ui', 'quiet',
    default=False,
)
coreconfigitem('ui', 'quietbookmarkmove',
    default=False,
)
coreconfigitem('ui', 'remotecmd',
    default='hg',
)
coreconfigitem('ui', 'report_untrusted',
    default=True,
)
coreconfigitem('ui', 'rollback',
    default=True,
)
coreconfigitem('ui', 'slash',
    default=False,
)
coreconfigitem('ui', 'ssh',
    default='ssh',
)
coreconfigitem('ui', 'statuscopies',
    default=False,
)
coreconfigitem('ui', 'strict',
    default=False,
)
coreconfigitem('ui', 'style',
    default='',
)
coreconfigitem('ui', 'supportcontact',
    default=None,
)
coreconfigitem('ui', 'textwidth',
    default=78,
)
coreconfigitem('ui', 'timeout',
    default='600',
)
coreconfigitem('ui', 'traceback',
    default=False,
)
coreconfigitem('ui', 'tweakdefaults',
    default=False,
)
coreconfigitem('ui', 'usehttp2',
    default=False,
)
coreconfigitem('ui', 'username',
    alias=[('ui', 'user')]
)
coreconfigitem('ui', 'verbose',
    default=False,
)
coreconfigitem('verify', 'skipflags',
    default=None,
)
coreconfigitem('web', 'allowbz2',
    default=False,
)
coreconfigitem('web', 'allowgz',
    default=False,
)
coreconfigitem('web', 'allowpull',
    default=True,
)
coreconfigitem('web', 'allow_push',
    default=list,
)
coreconfigitem('web', 'allowzip',
    default=False,
)
coreconfigitem('web', 'archivesubrepos',
    default=False,
)
coreconfigitem('web', 'cache',
    default=True,
)
coreconfigitem('web', 'contact',
    default=None,
)
coreconfigitem('web', 'deny_push',
    default=list,
)
coreconfigitem('web', 'guessmime',
    default=False,
)
coreconfigitem('web', 'hidden',
    default=False,
)
coreconfigitem('web', 'labels',
    default=list,
)
coreconfigitem('web', 'logoimg',
    default='hglogo.png',
)
coreconfigitem('web', 'logourl',
    default='https://mercurial-scm.org/',
)
coreconfigitem('web', 'accesslog',
    default='-',
)
coreconfigitem('web', 'address',
    default='',
)
coreconfigitem('web', 'allow_archive',
    default=list,
)
coreconfigitem('web', 'allow_read',
    default=list,
)
coreconfigitem('web', 'baseurl',
    default=None,
)
coreconfigitem('web', 'cacerts',
    default=None,
)
coreconfigitem('web', 'certificate',
    default=None,
)
coreconfigitem('web', 'collapse',
    default=False,
)
coreconfigitem('web', 'csp',
    default=None,
)
coreconfigitem('web', 'deny_read',
    default=list,
)
coreconfigitem('web', 'descend',
    default=True,
)
coreconfigitem('web', 'description',
    default="",
)
coreconfigitem('web', 'encoding',
    default=lambda: encoding.encoding,
)
coreconfigitem('web', 'errorlog',
    default='-',
)
coreconfigitem('web', 'ipv6',
    default=False,
)
coreconfigitem('web', 'maxchanges',
    default=10,
)
coreconfigitem('web', 'maxfiles',
    default=10,
)
coreconfigitem('web', 'maxshortchanges',
    default=60,
)
coreconfigitem('web', 'motd',
    default='',
)
coreconfigitem('web', 'name',
    default=dynamicdefault,
)
coreconfigitem('web', 'port',
    default=8000,
)
coreconfigitem('web', 'prefix',
    default='',
)
coreconfigitem('web', 'push_ssl',
    default=True,
)
coreconfigitem('web', 'refreshinterval',
    default=20,
)
coreconfigitem('web', 'staticurl',
    default=None,
)
coreconfigitem('web', 'stripes',
    default=1,
)
coreconfigitem('web', 'style',
    default='paper',
)
coreconfigitem('web', 'templates',
    default=None,
)
coreconfigitem('web', 'view',
    default='served',
)
coreconfigitem('worker', 'backgroundclose',
    default=dynamicdefault,
)
# Windows defaults to a limit of 512 open files. A buffer of 128
# should give us enough headway.
coreconfigitem('worker', 'backgroundclosemaxqueue',
    default=384,
)
coreconfigitem('worker', 'backgroundcloseminfilecount',
    default=2048,
)
coreconfigitem('worker', 'backgroundclosethreadcount',
    default=4,
)
coreconfigitem('worker', 'numcpus',
    default=None,
)

# Rebase related configuration moved to core because other extension are doing
# strange things. For example, shelve import the extensions to reuse some bit
# without formally loading it.
coreconfigitem('commands', 'rebase.requiredest',
    default=False,
)
coreconfigitem('experimental', 'rebaseskipobsolete',
    default=True,
)
coreconfigitem('rebase', 'singletransaction',
    default=False,
)
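
As a quick illustration of the registration machinery defined at the top of configitems.py (a sketch, not part of the changeset; the keys and values are invented for the example), itemregister.get prefers an exact, non-generic entry and otherwise falls back to generic regex items such as the ('color', '.*') registration:

# Illustrative only, using the configitem and itemregister classes above.
items = itemregister()
items['mode'] = configitem('color', 'mode', default='auto')
items['.*'] = configitem('color', '.*', default=None, generic=True)

items.get('mode').default       # 'auto': exact, non-generic match wins
items.get('pagermode').default  # None: no exact entry, so the generic
                                # '.*' item matches via its compiled regex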
merge.py
@@ -1,2038 +1,2040 @@
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12 import shutil
12 import shutil
13 import struct
13 import struct
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 addednodeid,
17 addednodeid,
18 bin,
18 bin,
19 hex,
19 hex,
20 modifiednodeid,
20 modifiednodeid,
21 nullhex,
21 nullhex,
22 nullid,
22 nullid,
23 nullrev,
23 nullrev,
24 )
24 )
25 from . import (
25 from . import (
26 copies,
26 copies,
27 error,
27 error,
28 extensions,
28 extensions,
29 filemerge,
29 filemerge,
30 match as matchmod,
30 match as matchmod,
31 obsutil,
31 obsutil,
32 pycompat,
32 pycompat,
33 scmutil,
33 scmutil,
34 subrepo,
34 subrepo,
35 util,
35 util,
36 worker,
36 worker,
37 )
37 )
38
38
39 _pack = struct.pack
39 _pack = struct.pack
40 _unpack = struct.unpack
40 _unpack = struct.unpack
41
41
42 def _droponode(data):
42 def _droponode(data):
43 # used for compatibility with v1
43 # used for compatibility with v1
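# (editor's note) the field removed here is the second-to-last one -- the
# "other file node" that only exists in the v2 format -- so the result matches
# the shorter v1 "F" record layout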
44 bits = data.split('\0')
44 bits = data.split('\0')
45 bits = bits[:-2] + bits[-1:]
45 bits = bits[:-2] + bits[-1:]
46 return '\0'.join(bits)
46 return '\0'.join(bits)
47
47
48 class mergestate(object):
48 class mergestate(object):
49 '''track 3-way merge state of individual files
49 '''track 3-way merge state of individual files
50
50
51 The merge state is stored on disk when needed. Two files are used: one with
51 The merge state is stored on disk when needed. Two files are used: one with
52 an old format (version 1), and one with a new format (version 2). Version 2
52 an old format (version 1), and one with a new format (version 2). Version 2
53 stores a superset of the data in version 1, and may gain new kinds of records
53 stores a superset of the data in version 1, and may gain new kinds of records
54 in the future. For more about the new format, see the documentation for
54 in the future. For more about the new format, see the documentation for
55 `_readrecordsv2`.
55 `_readrecordsv2`.
56
56
57 Each record can contain arbitrary content, and has an associated type. This
57 Each record can contain arbitrary content, and has an associated type. This
58 `type` should be a letter. If `type` is uppercase, the record is mandatory:
58 `type` should be a letter. If `type` is uppercase, the record is mandatory:
59 versions of Mercurial that don't support it should abort. If `type` is
59 versions of Mercurial that don't support it should abort. If `type` is
60 lowercase, the record can be safely ignored.
60 lowercase, the record can be safely ignored.
61
61
62 Currently known records:
62 Currently known records:
63
63
64 L: the node of the "local" part of the merge (hexified version)
64 L: the node of the "local" part of the merge (hexified version)
65 O: the node of the "other" part of the merge (hexified version)
65 O: the node of the "other" part of the merge (hexified version)
66 F: an entry for a file to be merged
66 F: an entry for a file to be merged
67 C: a change/delete or delete/change conflict
67 C: a change/delete or delete/change conflict
68 D: a file that the external merge driver will merge internally
68 D: a file that the external merge driver will merge internally
69 (experimental)
69 (experimental)
70 P: a path conflict (file vs directory)
70 P: a path conflict (file vs directory)
71 m: the external merge driver defined for this merge plus its run state
71 m: the external merge driver defined for this merge plus its run state
72 (experimental)
72 (experimental)
73 f: a (filename, dictionary) tuple of optional values for a given file
73 f: a (filename, dictionary) tuple of optional values for a given file
74 X: unsupported mandatory record type (used in tests)
74 X: unsupported mandatory record type (used in tests)
75 x: unsupported advisory record type (used in tests)
75 x: unsupported advisory record type (used in tests)
76 l: the labels for the parts of the merge.
76 l: the labels for the parts of the merge.
77
77
78 Merge driver run states (experimental):
78 Merge driver run states (experimental):
79 u: driver-resolved files unmarked -- needs to be run next time we're about
79 u: driver-resolved files unmarked -- needs to be run next time we're about
80 to resolve or commit
80 to resolve or commit
81 m: driver-resolved files marked -- only needs to be run before commit
81 m: driver-resolved files marked -- only needs to be run before commit
82 s: success/skipped -- does not need to be run any more
82 s: success/skipped -- does not need to be run any more
83
83
84 Merge record states (stored in self._state, indexed by filename):
84 Merge record states (stored in self._state, indexed by filename):
85 u: unresolved conflict
85 u: unresolved conflict
86 r: resolved conflict
86 r: resolved conflict
87 pu: unresolved path conflict (file conflicts with directory)
87 pu: unresolved path conflict (file conflicts with directory)
88 pr: resolved path conflict
88 pr: resolved path conflict
89 d: driver-resolved conflict
89 d: driver-resolved conflict
90
90
91 The resolve command transitions between 'u' and 'r' for conflicts and
91 The resolve command transitions between 'u' and 'r' for conflicts and
92 'pu' and 'pr' for path conflicts.
92 'pu' and 'pr' for path conflicts.
93 '''
93 '''
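# (editor's illustration, not part of the original source) a merge with one
# unresolved file might serialize to records such as:
#   ('L', '<40-hex local node>')
#   ('O', '<40-hex other node>')
#   ('F', 'path/to/file\0u\0<sha1 of path>\0<local path>\0<ancestor path>\0'
#         '<ancestor filenode>\0<other path>\0<other filenode>\0<flags>')
#   ('l', 'working copy\0merge rev')    # advisory label record
# the '<...>' placeholders stand for values produced by _makerecords() below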
94 statepathv1 = 'merge/state'
94 statepathv1 = 'merge/state'
95 statepathv2 = 'merge/state2'
95 statepathv2 = 'merge/state2'
96
96
97 @staticmethod
97 @staticmethod
98 def clean(repo, node=None, other=None, labels=None):
98 def clean(repo, node=None, other=None, labels=None):
99 """Initialize a brand new merge state, removing any existing state on
99 """Initialize a brand new merge state, removing any existing state on
100 disk."""
100 disk."""
101 ms = mergestate(repo)
101 ms = mergestate(repo)
102 ms.reset(node, other, labels)
102 ms.reset(node, other, labels)
103 return ms
103 return ms
104
104
105 @staticmethod
105 @staticmethod
106 def read(repo):
106 def read(repo):
107 """Initialize the merge state, reading it from disk."""
107 """Initialize the merge state, reading it from disk."""
108 ms = mergestate(repo)
108 ms = mergestate(repo)
109 ms._read()
109 ms._read()
110 return ms
110 return ms
111
111
112 def __init__(self, repo):
112 def __init__(self, repo):
113 """Initialize the merge state.
113 """Initialize the merge state.
114
114
115 Do not use this directly! Instead call read() or clean()."""
115 Do not use this directly! Instead call read() or clean()."""
116 self._repo = repo
116 self._repo = repo
117 self._dirty = False
117 self._dirty = False
118 self._labels = None
118 self._labels = None
119
119
120 def reset(self, node=None, other=None, labels=None):
120 def reset(self, node=None, other=None, labels=None):
121 self._state = {}
121 self._state = {}
122 self._stateextras = {}
122 self._stateextras = {}
123 self._local = None
123 self._local = None
124 self._other = None
124 self._other = None
125 self._labels = labels
125 self._labels = labels
126 for var in ('localctx', 'otherctx'):
126 for var in ('localctx', 'otherctx'):
127 if var in vars(self):
127 if var in vars(self):
128 delattr(self, var)
128 delattr(self, var)
129 if node:
129 if node:
130 self._local = node
130 self._local = node
131 self._other = other
131 self._other = other
132 self._readmergedriver = None
132 self._readmergedriver = None
133 if self.mergedriver:
133 if self.mergedriver:
134 self._mdstate = 's'
134 self._mdstate = 's'
135 else:
135 else:
136 self._mdstate = 'u'
136 self._mdstate = 'u'
137 shutil.rmtree(self._repo.vfs.join('merge'), True)
137 shutil.rmtree(self._repo.vfs.join('merge'), True)
138 self._results = {}
138 self._results = {}
139 self._dirty = False
139 self._dirty = False
140
140
141 def _read(self):
141 def _read(self):
142 """Analyse each record content to restore a serialized state from disk
142 """Analyse each record content to restore a serialized state from disk
143
143
144 This function processes "record" entries produced by the de-serialization
144 This function processes "record" entries produced by the de-serialization
145 of the on-disk file.
145 of the on-disk file.
146 """
146 """
147 self._state = {}
147 self._state = {}
148 self._stateextras = {}
148 self._stateextras = {}
149 self._local = None
149 self._local = None
150 self._other = None
150 self._other = None
151 for var in ('localctx', 'otherctx'):
151 for var in ('localctx', 'otherctx'):
152 if var in vars(self):
152 if var in vars(self):
153 delattr(self, var)
153 delattr(self, var)
154 self._readmergedriver = None
154 self._readmergedriver = None
155 self._mdstate = 's'
155 self._mdstate = 's'
156 unsupported = set()
156 unsupported = set()
157 records = self._readrecords()
157 records = self._readrecords()
158 for rtype, record in records:
158 for rtype, record in records:
159 if rtype == 'L':
159 if rtype == 'L':
160 self._local = bin(record)
160 self._local = bin(record)
161 elif rtype == 'O':
161 elif rtype == 'O':
162 self._other = bin(record)
162 self._other = bin(record)
163 elif rtype == 'm':
163 elif rtype == 'm':
164 bits = record.split('\0', 1)
164 bits = record.split('\0', 1)
165 mdstate = bits[1]
165 mdstate = bits[1]
166 if len(mdstate) != 1 or mdstate not in 'ums':
166 if len(mdstate) != 1 or mdstate not in 'ums':
167 # the merge driver should be idempotent, so just rerun it
167 # the merge driver should be idempotent, so just rerun it
168 mdstate = 'u'
168 mdstate = 'u'
169
169
170 self._readmergedriver = bits[0]
170 self._readmergedriver = bits[0]
171 self._mdstate = mdstate
171 self._mdstate = mdstate
172 elif rtype in 'FDCP':
172 elif rtype in 'FDCP':
173 bits = record.split('\0')
173 bits = record.split('\0')
174 self._state[bits[0]] = bits[1:]
174 self._state[bits[0]] = bits[1:]
175 elif rtype == 'f':
175 elif rtype == 'f':
176 filename, rawextras = record.split('\0', 1)
176 filename, rawextras = record.split('\0', 1)
177 extraparts = rawextras.split('\0')
177 extraparts = rawextras.split('\0')
178 extras = {}
178 extras = {}
179 i = 0
179 i = 0
180 while i < len(extraparts):
180 while i < len(extraparts):
181 extras[extraparts[i]] = extraparts[i + 1]
181 extras[extraparts[i]] = extraparts[i + 1]
182 i += 2
182 i += 2
183
183
184 self._stateextras[filename] = extras
184 self._stateextras[filename] = extras
185 elif rtype == 'l':
185 elif rtype == 'l':
186 labels = record.split('\0', 2)
186 labels = record.split('\0', 2)
187 self._labels = [l for l in labels if len(l) > 0]
187 self._labels = [l for l in labels if len(l) > 0]
188 elif not rtype.islower():
188 elif not rtype.islower():
189 unsupported.add(rtype)
189 unsupported.add(rtype)
190 self._results = {}
190 self._results = {}
191 self._dirty = False
191 self._dirty = False
192
192
193 if unsupported:
193 if unsupported:
194 raise error.UnsupportedMergeRecords(unsupported)
194 raise error.UnsupportedMergeRecords(unsupported)
195
195
196 def _readrecords(self):
196 def _readrecords(self):
197 """Read merge state from disk and return a list of record (TYPE, data)
197 """Read merge state from disk and return a list of record (TYPE, data)
198
198
199 We read data from both v1 and v2 files and decide which one to use.
199 We read data from both v1 and v2 files and decide which one to use.
200
200
201 V1 has been used by versions prior to 2.9.1 and contains less data than
201 V1 has been used by versions prior to 2.9.1 and contains less data than
202 v2. We read both versions and check if no data in v2 contradicts
202 v2. We read both versions and check if no data in v2 contradicts
203 v1. If there is no contradiction we can safely assume that both v1
203 v1. If there is no contradiction we can safely assume that both v1
204 and v2 were written at the same time and use the extra data in v2. If
204 and v2 were written at the same time and use the extra data in v2. If
205 there is a contradiction we ignore the v2 content, as we assume an old
205 there is a contradiction we ignore the v2 content, as we assume an old
206 version of Mercurial has overwritten the mergestate file and left an old v2
206 version of Mercurial has overwritten the mergestate file and left an old v2
207 file around.
207 file around.
208
208
209 returns a list of records [(TYPE, data), ...]"""
209 returns a list of records [(TYPE, data), ...]"""
210 v1records = self._readrecordsv1()
210 v1records = self._readrecordsv1()
211 v2records = self._readrecordsv2()
211 v2records = self._readrecordsv2()
212 if self._v1v2match(v1records, v2records):
212 if self._v1v2match(v1records, v2records):
213 return v2records
213 return v2records
214 else:
214 else:
215 # v1 file is newer than v2 file, use it
215 # v1 file is newer than v2 file, use it
216 # we have to infer the "other" changeset of the merge
216 # we have to infer the "other" changeset of the merge
217 # we cannot do better than that with v1 of the format
217 # we cannot do better than that with v1 of the format
218 mctx = self._repo[None].parents()[-1]
218 mctx = self._repo[None].parents()[-1]
219 v1records.append(('O', mctx.hex()))
219 v1records.append(('O', mctx.hex()))
220 # add placeholder "other" file node information
220 # add placeholder "other" file node information
221 # nobody is using it yet so we do not need to fetch the data
221 # nobody is using it yet so we do not need to fetch the data
222 # if mctx was wrong, `mctx[bits[-2]]` may fail.
222 # if mctx was wrong, `mctx[bits[-2]]` may fail.
223 for idx, r in enumerate(v1records):
223 for idx, r in enumerate(v1records):
224 if r[0] == 'F':
224 if r[0] == 'F':
225 bits = r[1].split('\0')
225 bits = r[1].split('\0')
226 bits.insert(-2, '')
226 bits.insert(-2, '')
227 v1records[idx] = (r[0], '\0'.join(bits))
227 v1records[idx] = (r[0], '\0'.join(bits))
228 return v1records
228 return v1records
229
229
230 def _v1v2match(self, v1records, v2records):
230 def _v1v2match(self, v1records, v2records):
231 oldv2 = set() # old format version of v2 record
231 oldv2 = set() # old format version of v2 record
232 for rec in v2records:
232 for rec in v2records:
233 if rec[0] == 'L':
233 if rec[0] == 'L':
234 oldv2.add(rec)
234 oldv2.add(rec)
235 elif rec[0] == 'F':
235 elif rec[0] == 'F':
236 # drop the onode data (not contained in v1)
236 # drop the onode data (not contained in v1)
237 oldv2.add(('F', _droponode(rec[1])))
237 oldv2.add(('F', _droponode(rec[1])))
238 for rec in v1records:
238 for rec in v1records:
239 if rec not in oldv2:
239 if rec not in oldv2:
240 return False
240 return False
241 else:
241 else:
242 return True
242 return True
243
243
244 def _readrecordsv1(self):
244 def _readrecordsv1(self):
245 """read on disk merge state for version 1 file
245 """read on disk merge state for version 1 file
246
246
247 returns a list of records [(TYPE, data), ...]
247 returns a list of records [(TYPE, data), ...]
248
248
249 Note: the "F" data from this file are one entry short
249 Note: the "F" data from this file are one entry short
250 (no "other file node" entry)
250 (no "other file node" entry)
251 """
251 """
252 records = []
252 records = []
253 try:
253 try:
254 f = self._repo.vfs(self.statepathv1)
254 f = self._repo.vfs(self.statepathv1)
255 for i, l in enumerate(f):
255 for i, l in enumerate(f):
256 if i == 0:
256 if i == 0:
257 records.append(('L', l[:-1]))
257 records.append(('L', l[:-1]))
258 else:
258 else:
259 records.append(('F', l[:-1]))
259 records.append(('F', l[:-1]))
260 f.close()
260 f.close()
261 except IOError as err:
261 except IOError as err:
262 if err.errno != errno.ENOENT:
262 if err.errno != errno.ENOENT:
263 raise
263 raise
264 return records
264 return records
265
265
266 def _readrecordsv2(self):
266 def _readrecordsv2(self):
267 """read on disk merge state for version 2 file
267 """read on disk merge state for version 2 file
268
268
269 This format is a list of arbitrary records of the form:
269 This format is a list of arbitrary records of the form:
270
270
271 [type][length][content]
271 [type][length][content]
272
272
273 `type` is a single character, `length` is a 4 byte integer, and
273 `type` is a single character, `length` is a 4 byte integer, and
274 `content` is an arbitrary byte sequence of length `length`.
274 `content` is an arbitrary byte sequence of length `length`.
275
275
276 Mercurial versions prior to 3.7 have a bug where if there are
276 Mercurial versions prior to 3.7 have a bug where if there are
277 unsupported mandatory merge records, attempting to clear out the merge
277 unsupported mandatory merge records, attempting to clear out the merge
278 state with hg update --clean or similar aborts. The 't' record type
278 state with hg update --clean or similar aborts. The 't' record type
279 works around that by writing out what those versions treat as an
279 works around that by writing out what those versions treat as an
280 advisory record, but later versions interpret as special: the first
280 advisory record, but later versions interpret as special: the first
281 character is the 'real' record type and everything onwards is the data.
281 character is the 'real' record type and everything onwards is the data.
282
282
283 Returns list of records [(TYPE, data), ...]."""
283 Returns list of records [(TYPE, data), ...]."""
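# (editor's illustration, not in the original) per the layout above, a record
# ('F', 'foo') is written by _writerecordsv2 as 'F' + '\x00\x00\x00\x03' + 'foo',
# while an unsupported mandatory type such as ('X', 'bar') is wrapped as
# 't' + '\x00\x00\x00\x04' + 'Xbar', which pre-3.7 clients treat as an
# ignorable advisory record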
284 records = []
284 records = []
285 try:
285 try:
286 f = self._repo.vfs(self.statepathv2)
286 f = self._repo.vfs(self.statepathv2)
287 data = f.read()
287 data = f.read()
288 off = 0
288 off = 0
289 end = len(data)
289 end = len(data)
290 while off < end:
290 while off < end:
291 rtype = data[off]
291 rtype = data[off]
292 off += 1
292 off += 1
293 length = _unpack('>I', data[off:(off + 4)])[0]
293 length = _unpack('>I', data[off:(off + 4)])[0]
294 off += 4
294 off += 4
295 record = data[off:(off + length)]
295 record = data[off:(off + length)]
296 off += length
296 off += length
297 if rtype == 't':
297 if rtype == 't':
298 rtype, record = record[0], record[1:]
298 rtype, record = record[0], record[1:]
299 records.append((rtype, record))
299 records.append((rtype, record))
300 f.close()
300 f.close()
301 except IOError as err:
301 except IOError as err:
302 if err.errno != errno.ENOENT:
302 if err.errno != errno.ENOENT:
303 raise
303 raise
304 return records
304 return records
305
305
306 @util.propertycache
306 @util.propertycache
307 def mergedriver(self):
307 def mergedriver(self):
308 # protect against the following:
308 # protect against the following:
309 # - A configures a malicious merge driver in their hgrc, then
309 # - A configures a malicious merge driver in their hgrc, then
310 # pauses the merge
310 # pauses the merge
311 # - A edits their hgrc to remove references to the merge driver
311 # - A edits their hgrc to remove references to the merge driver
312 # - A gives a copy of their entire repo, including .hg, to B
312 # - A gives a copy of their entire repo, including .hg, to B
313 # - B inspects .hgrc and finds it to be clean
313 # - B inspects .hgrc and finds it to be clean
314 # - B then continues the merge and the malicious merge driver
314 # - B then continues the merge and the malicious merge driver
315 # gets invoked
315 # gets invoked
316 configmergedriver = self._repo.ui.config('experimental', 'mergedriver')
316 configmergedriver = self._repo.ui.config('experimental', 'mergedriver')
317 if (self._readmergedriver is not None
317 if (self._readmergedriver is not None
318 and self._readmergedriver != configmergedriver):
318 and self._readmergedriver != configmergedriver):
319 raise error.ConfigError(
319 raise error.ConfigError(
320 _("merge driver changed since merge started"),
320 _("merge driver changed since merge started"),
321 hint=_("revert merge driver change or abort merge"))
321 hint=_("revert merge driver change or abort merge"))
322
322
323 return configmergedriver
323 return configmergedriver
324
324
325 @util.propertycache
325 @util.propertycache
326 def localctx(self):
326 def localctx(self):
327 if self._local is None:
327 if self._local is None:
328 msg = "localctx accessed but self._local isn't set"
328 msg = "localctx accessed but self._local isn't set"
329 raise error.ProgrammingError(msg)
329 raise error.ProgrammingError(msg)
330 return self._repo[self._local]
330 return self._repo[self._local]
331
331
332 @util.propertycache
332 @util.propertycache
333 def otherctx(self):
333 def otherctx(self):
334 if self._other is None:
334 if self._other is None:
335 msg = "otherctx accessed but self._other isn't set"
335 msg = "otherctx accessed but self._other isn't set"
336 raise error.ProgrammingError(msg)
336 raise error.ProgrammingError(msg)
337 return self._repo[self._other]
337 return self._repo[self._other]
338
338
339 def active(self):
339 def active(self):
340 """Whether mergestate is active.
340 """Whether mergestate is active.
341
341
342 Returns True if there appears to be mergestate. This is a rough proxy
342 Returns True if there appears to be mergestate. This is a rough proxy
343 for "is a merge in progress."
343 for "is a merge in progress."
344 """
344 """
345 # Check local variables before looking at filesystem for performance
345 # Check local variables before looking at filesystem for performance
346 # reasons.
346 # reasons.
347 return bool(self._local) or bool(self._state) or \
347 return bool(self._local) or bool(self._state) or \
348 self._repo.vfs.exists(self.statepathv1) or \
348 self._repo.vfs.exists(self.statepathv1) or \
349 self._repo.vfs.exists(self.statepathv2)
349 self._repo.vfs.exists(self.statepathv2)
350
350
351 def commit(self):
351 def commit(self):
352 """Write current state on disk (if necessary)"""
352 """Write current state on disk (if necessary)"""
353 if self._dirty:
353 if self._dirty:
354 records = self._makerecords()
354 records = self._makerecords()
355 self._writerecords(records)
355 self._writerecords(records)
356 self._dirty = False
356 self._dirty = False
357
357
358 def _makerecords(self):
358 def _makerecords(self):
359 records = []
359 records = []
360 records.append(('L', hex(self._local)))
360 records.append(('L', hex(self._local)))
361 records.append(('O', hex(self._other)))
361 records.append(('O', hex(self._other)))
362 if self.mergedriver:
362 if self.mergedriver:
363 records.append(('m', '\0'.join([
363 records.append(('m', '\0'.join([
364 self.mergedriver, self._mdstate])))
364 self.mergedriver, self._mdstate])))
365 # Write out state items. In all cases, the value of the state map entry
365 # Write out state items. In all cases, the value of the state map entry
366 # is written as the contents of the record. The record type depends on
366 # is written as the contents of the record. The record type depends on
367 # the type of state that is stored, and capital-letter records are used
367 # the type of state that is stored, and capital-letter records are used
368 # to prevent older versions of Mercurial that do not support the feature
368 # to prevent older versions of Mercurial that do not support the feature
369 # from loading them.
369 # from loading them.
370 for filename, v in self._state.iteritems():
370 for filename, v in self._state.iteritems():
371 if v[0] == 'd':
371 if v[0] == 'd':
372 # Driver-resolved merge. These are stored in 'D' records.
372 # Driver-resolved merge. These are stored in 'D' records.
373 records.append(('D', '\0'.join([filename] + v)))
373 records.append(('D', '\0'.join([filename] + v)))
374 elif v[0] in ('pu', 'pr'):
374 elif v[0] in ('pu', 'pr'):
375 # Path conflicts. These are stored in 'P' records. The current
375 # Path conflicts. These are stored in 'P' records. The current
376 # resolution state ('pu' or 'pr') is stored within the record.
376 # resolution state ('pu' or 'pr') is stored within the record.
377 records.append(('P', '\0'.join([filename] + v)))
377 records.append(('P', '\0'.join([filename] + v)))
378 elif v[1] == nullhex or v[6] == nullhex:
378 elif v[1] == nullhex or v[6] == nullhex:
379 # Change/Delete or Delete/Change conflicts. These are stored in
379 # Change/Delete or Delete/Change conflicts. These are stored in
380 # 'C' records. v[1] is the local file, and is nullhex when the
380 # 'C' records. v[1] is the local file, and is nullhex when the
381 # file is deleted locally ('dc'). v[6] is the remote file, and
381 # file is deleted locally ('dc'). v[6] is the remote file, and
382 # is nullhex when the file is deleted remotely ('cd').
382 # is nullhex when the file is deleted remotely ('cd').
383 records.append(('C', '\0'.join([filename] + v)))
383 records.append(('C', '\0'.join([filename] + v)))
384 else:
384 else:
385 # Normal files. These are stored in 'F' records.
385 # Normal files. These are stored in 'F' records.
386 records.append(('F', '\0'.join([filename] + v)))
386 records.append(('F', '\0'.join([filename] + v)))
387 for filename, extras in sorted(self._stateextras.iteritems()):
387 for filename, extras in sorted(self._stateextras.iteritems()):
388 rawextras = '\0'.join('%s\0%s' % (k, v) for k, v in
388 rawextras = '\0'.join('%s\0%s' % (k, v) for k, v in
389 extras.iteritems())
389 extras.iteritems())
390 records.append(('f', '%s\0%s' % (filename, rawextras)))
390 records.append(('f', '%s\0%s' % (filename, rawextras)))
391 if self._labels is not None:
391 if self._labels is not None:
392 labels = '\0'.join(self._labels)
392 labels = '\0'.join(self._labels)
393 records.append(('l', labels))
393 records.append(('l', labels))
394 return records
394 return records
395
395
396 def _writerecords(self, records):
396 def _writerecords(self, records):
397 """Write current state on disk (both v1 and v2)"""
397 """Write current state on disk (both v1 and v2)"""
398 self._writerecordsv1(records)
398 self._writerecordsv1(records)
399 self._writerecordsv2(records)
399 self._writerecordsv2(records)
400
400
401 def _writerecordsv1(self, records):
401 def _writerecordsv1(self, records):
402 """Write current state on disk in a version 1 file"""
402 """Write current state on disk in a version 1 file"""
403 f = self._repo.vfs(self.statepathv1, 'w')
403 f = self._repo.vfs(self.statepathv1, 'w')
404 irecords = iter(records)
404 irecords = iter(records)
405 lrecords = next(irecords)
405 lrecords = next(irecords)
406 assert lrecords[0] == 'L'
406 assert lrecords[0] == 'L'
407 f.write(hex(self._local) + '\n')
407 f.write(hex(self._local) + '\n')
408 for rtype, data in irecords:
408 for rtype, data in irecords:
409 if rtype == 'F':
409 if rtype == 'F':
410 f.write('%s\n' % _droponode(data))
410 f.write('%s\n' % _droponode(data))
411 f.close()
411 f.close()
412
412
413 def _writerecordsv2(self, records):
413 def _writerecordsv2(self, records):
414 """Write current state on disk in a version 2 file
414 """Write current state on disk in a version 2 file
415
415
416 See the docstring for _readrecordsv2 for why we use 't'."""
416 See the docstring for _readrecordsv2 for why we use 't'."""
417 # these are the records that all version 2 clients can read
417 # these are the records that all version 2 clients can read
418 whitelist = 'LOF'
418 whitelist = 'LOF'
419 f = self._repo.vfs(self.statepathv2, 'w')
419 f = self._repo.vfs(self.statepathv2, 'w')
420 for key, data in records:
420 for key, data in records:
421 assert len(key) == 1
421 assert len(key) == 1
422 if key not in whitelist:
422 if key not in whitelist:
423 key, data = 't', '%s%s' % (key, data)
423 key, data = 't', '%s%s' % (key, data)
424 format = '>sI%is' % len(data)
424 format = '>sI%is' % len(data)
425 f.write(_pack(format, key, len(data), data))
425 f.write(_pack(format, key, len(data), data))
426 f.close()
426 f.close()
427
427
428 def add(self, fcl, fco, fca, fd):
428 def add(self, fcl, fco, fca, fd):
429 """add a new (potentially?) conflicting file the merge state
429 """add a new (potentially?) conflicting file the merge state
430 fcl: file context for local,
430 fcl: file context for local,
431 fco: file context for remote,
431 fco: file context for remote,
432 fca: file context for ancestors,
432 fca: file context for ancestors,
433 fd: file path of the resulting merge.
433 fd: file path of the resulting merge.
434
434
435 note: also write the local version to the `.hg/merge` directory.
435 note: also write the local version to the `.hg/merge` directory.
436 """
436 """
437 if fcl.isabsent():
437 if fcl.isabsent():
438 hash = nullhex
438 hash = nullhex
439 else:
439 else:
440 hash = hex(hashlib.sha1(fcl.path()).digest())
440 hash = hex(hashlib.sha1(fcl.path()).digest())
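# (editor's note) the backup key under .hg/merge is the sha1 of the file
# *path*, not of its contents, so re-adding the same path overwrites the
# earlier backup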
441 self._repo.vfs.write('merge/' + hash, fcl.data())
441 self._repo.vfs.write('merge/' + hash, fcl.data())
442 self._state[fd] = ['u', hash, fcl.path(),
442 self._state[fd] = ['u', hash, fcl.path(),
443 fca.path(), hex(fca.filenode()),
443 fca.path(), hex(fca.filenode()),
444 fco.path(), hex(fco.filenode()),
444 fco.path(), hex(fco.filenode()),
445 fcl.flags()]
445 fcl.flags()]
446 self._stateextras[fd] = {'ancestorlinknode': hex(fca.node())}
446 self._stateextras[fd] = {'ancestorlinknode': hex(fca.node())}
447 self._dirty = True
447 self._dirty = True
448
448
449 def addpath(self, path, frename, forigin):
449 def addpath(self, path, frename, forigin):
450 """add a new conflicting path to the merge state
450 """add a new conflicting path to the merge state
451 path: the path that conflicts
451 path: the path that conflicts
452 frename: the filename the conflicting file was renamed to
452 frename: the filename the conflicting file was renamed to
453 forigin: origin of the file ('l' or 'r' for local/remote)
453 forigin: origin of the file ('l' or 'r' for local/remote)
454 """
454 """
455 self._state[path] = ['pu', frename, forigin]
455 self._state[path] = ['pu', frename, forigin]
456 self._dirty = True
456 self._dirty = True
457
457
458 def __contains__(self, dfile):
458 def __contains__(self, dfile):
459 return dfile in self._state
459 return dfile in self._state
460
460
461 def __getitem__(self, dfile):
461 def __getitem__(self, dfile):
462 return self._state[dfile][0]
462 return self._state[dfile][0]
463
463
464 def __iter__(self):
464 def __iter__(self):
465 return iter(sorted(self._state))
465 return iter(sorted(self._state))
466
466
467 def files(self):
467 def files(self):
468 return self._state.keys()
468 return self._state.keys()
469
469
470 def mark(self, dfile, state):
470 def mark(self, dfile, state):
471 self._state[dfile][0] = state
471 self._state[dfile][0] = state
472 self._dirty = True
472 self._dirty = True
473
473
474 def mdstate(self):
474 def mdstate(self):
475 return self._mdstate
475 return self._mdstate
476
476
477 def unresolved(self):
477 def unresolved(self):
478 """Obtain the paths of unresolved files."""
478 """Obtain the paths of unresolved files."""
479
479
480 for f, entry in self._state.iteritems():
480 for f, entry in self._state.iteritems():
481 if entry[0] in ('u', 'pu'):
481 if entry[0] in ('u', 'pu'):
482 yield f
482 yield f
483
483
484 def driverresolved(self):
484 def driverresolved(self):
485 """Obtain the paths of driver-resolved files."""
485 """Obtain the paths of driver-resolved files."""
486
486
487 for f, entry in self._state.items():
487 for f, entry in self._state.items():
488 if entry[0] == 'd':
488 if entry[0] == 'd':
489 yield f
489 yield f
490
490
491 def extras(self, filename):
491 def extras(self, filename):
492 return self._stateextras.setdefault(filename, {})
492 return self._stateextras.setdefault(filename, {})
493
493
494 def _resolve(self, preresolve, dfile, wctx):
494 def _resolve(self, preresolve, dfile, wctx):
495 """rerun merge process for file path `dfile`"""
495 """rerun merge process for file path `dfile`"""
496 if self[dfile] in 'rd':
496 if self[dfile] in 'rd':
497 return True, 0
497 return True, 0
498 stateentry = self._state[dfile]
498 stateentry = self._state[dfile]
499 state, hash, lfile, afile, anode, ofile, onode, flags = stateentry
499 state, hash, lfile, afile, anode, ofile, onode, flags = stateentry
500 octx = self._repo[self._other]
500 octx = self._repo[self._other]
501 extras = self.extras(dfile)
501 extras = self.extras(dfile)
502 anccommitnode = extras.get('ancestorlinknode')
502 anccommitnode = extras.get('ancestorlinknode')
503 if anccommitnode:
503 if anccommitnode:
504 actx = self._repo[anccommitnode]
504 actx = self._repo[anccommitnode]
505 else:
505 else:
506 actx = None
506 actx = None
507 fcd = self._filectxorabsent(hash, wctx, dfile)
507 fcd = self._filectxorabsent(hash, wctx, dfile)
508 fco = self._filectxorabsent(onode, octx, ofile)
508 fco = self._filectxorabsent(onode, octx, ofile)
509 # TODO: move this to filectxorabsent
509 # TODO: move this to filectxorabsent
510 fca = self._repo.filectx(afile, fileid=anode, changeid=actx)
510 fca = self._repo.filectx(afile, fileid=anode, changeid=actx)
511 # "premerge" x flags
511 # "premerge" x flags
512 flo = fco.flags()
512 flo = fco.flags()
513 fla = fca.flags()
513 fla = fca.flags()
514 if 'x' in flags + flo + fla and 'l' not in flags + flo + fla:
514 if 'x' in flags + flo + fla and 'l' not in flags + flo + fla:
515 if fca.node() == nullid and flags != flo:
515 if fca.node() == nullid and flags != flo:
516 if preresolve:
516 if preresolve:
517 self._repo.ui.warn(
517 self._repo.ui.warn(
518 _('warning: cannot merge flags for %s '
518 _('warning: cannot merge flags for %s '
519 'without common ancestor - keeping local flags\n')
519 'without common ancestor - keeping local flags\n')
520 % afile)
520 % afile)
521 elif flags == fla:
521 elif flags == fla:
522 flags = flo
522 flags = flo
523 if preresolve:
523 if preresolve:
524 # restore local
524 # restore local
525 if hash != nullhex:
525 if hash != nullhex:
526 f = self._repo.vfs('merge/' + hash)
526 f = self._repo.vfs('merge/' + hash)
527 wctx[dfile].write(f.read(), flags)
527 wctx[dfile].write(f.read(), flags)
528 f.close()
528 f.close()
529 else:
529 else:
530 wctx[dfile].remove(ignoremissing=True)
530 wctx[dfile].remove(ignoremissing=True)
531 complete, r, deleted = filemerge.premerge(self._repo, wctx,
531 complete, r, deleted = filemerge.premerge(self._repo, wctx,
532 self._local, lfile, fcd,
532 self._local, lfile, fcd,
533 fco, fca,
533 fco, fca,
534 labels=self._labels)
534 labels=self._labels)
535 else:
535 else:
536 complete, r, deleted = filemerge.filemerge(self._repo, wctx,
536 complete, r, deleted = filemerge.filemerge(self._repo, wctx,
537 self._local, lfile, fcd,
537 self._local, lfile, fcd,
538 fco, fca,
538 fco, fca,
539 labels=self._labels)
539 labels=self._labels)
540 if r is None:
540 if r is None:
541 # no real conflict
541 # no real conflict
542 del self._state[dfile]
542 del self._state[dfile]
543 self._stateextras.pop(dfile, None)
543 self._stateextras.pop(dfile, None)
544 self._dirty = True
544 self._dirty = True
545 elif not r:
545 elif not r:
546 self.mark(dfile, 'r')
546 self.mark(dfile, 'r')
547
547
548 if complete:
548 if complete:
549 action = None
549 action = None
550 if deleted:
550 if deleted:
551 if fcd.isabsent():
551 if fcd.isabsent():
552 # dc: local picked. Need to drop if present, which may
552 # dc: local picked. Need to drop if present, which may
553 # happen on re-resolves.
553 # happen on re-resolves.
554 action = 'f'
554 action = 'f'
555 else:
555 else:
556 # cd: remote picked (or otherwise deleted)
556 # cd: remote picked (or otherwise deleted)
557 action = 'r'
557 action = 'r'
558 else:
558 else:
559 if fcd.isabsent(): # dc: remote picked
559 if fcd.isabsent(): # dc: remote picked
560 action = 'g'
560 action = 'g'
561 elif fco.isabsent(): # cd: local picked
561 elif fco.isabsent(): # cd: local picked
562 if dfile in self.localctx:
562 if dfile in self.localctx:
563 action = 'am'
563 action = 'am'
564 else:
564 else:
565 action = 'a'
565 action = 'a'
566 # else: regular merges (no action necessary)
566 # else: regular merges (no action necessary)
567 self._results[dfile] = r, action
567 self._results[dfile] = r, action
568
568
569 return complete, r
569 return complete, r
570
570
571 def _filectxorabsent(self, hexnode, ctx, f):
571 def _filectxorabsent(self, hexnode, ctx, f):
572 if hexnode == nullhex:
572 if hexnode == nullhex:
573 return filemerge.absentfilectx(ctx, f)
573 return filemerge.absentfilectx(ctx, f)
574 else:
574 else:
575 return ctx[f]
575 return ctx[f]
576
576
577 def preresolve(self, dfile, wctx):
577 def preresolve(self, dfile, wctx):
578 """run premerge process for dfile
578 """run premerge process for dfile
579
579
580 Returns whether the merge is complete, and the exit code."""
580 Returns whether the merge is complete, and the exit code."""
581 return self._resolve(True, dfile, wctx)
581 return self._resolve(True, dfile, wctx)
582
582
583 def resolve(self, dfile, wctx):
583 def resolve(self, dfile, wctx):
584 """run merge process (assuming premerge was run) for dfile
584 """run merge process (assuming premerge was run) for dfile
585
585
586 Returns the exit code of the merge."""
586 Returns the exit code of the merge."""
587 return self._resolve(False, dfile, wctx)[1]
587 return self._resolve(False, dfile, wctx)[1]
588
588
589 def counts(self):
589 def counts(self):
590 """return counts for updated, merged and removed files in this
590 """return counts for updated, merged and removed files in this
591 session"""
591 session"""
592 updated, merged, removed = 0, 0, 0
592 updated, merged, removed = 0, 0, 0
593 for r, action in self._results.itervalues():
593 for r, action in self._results.itervalues():
594 if r is None:
594 if r is None:
595 updated += 1
595 updated += 1
596 elif r == 0:
596 elif r == 0:
597 if action == 'r':
597 if action == 'r':
598 removed += 1
598 removed += 1
599 else:
599 else:
600 merged += 1
600 merged += 1
601 return updated, merged, removed
601 return updated, merged, removed
602
602
603 def unresolvedcount(self):
603 def unresolvedcount(self):
604 """get unresolved count for this merge (persistent)"""
604 """get unresolved count for this merge (persistent)"""
605 return len(list(self.unresolved()))
605 return len(list(self.unresolved()))
606
606
607 def actions(self):
607 def actions(self):
608 """return lists of actions to perform on the dirstate"""
608 """return lists of actions to perform on the dirstate"""
609 actions = {'r': [], 'f': [], 'a': [], 'am': [], 'g': []}
609 actions = {'r': [], 'f': [], 'a': [], 'am': [], 'g': []}
610 for f, (r, action) in self._results.iteritems():
610 for f, (r, action) in self._results.iteritems():
611 if action is not None:
611 if action is not None:
612 actions[action].append((f, None, "merge result"))
612 actions[action].append((f, None, "merge result"))
613 return actions
613 return actions
614
614
615 def recordactions(self):
615 def recordactions(self):
616 """record remove/add/get actions in the dirstate"""
616 """record remove/add/get actions in the dirstate"""
617 branchmerge = self._repo.dirstate.p2() != nullid
617 branchmerge = self._repo.dirstate.p2() != nullid
618 recordupdates(self._repo, self.actions(), branchmerge)
618 recordupdates(self._repo, self.actions(), branchmerge)
619
619
620 def queueremove(self, f):
620 def queueremove(self, f):
621 """queues a file to be removed from the dirstate
621 """queues a file to be removed from the dirstate
622
622
623 Meant for use by custom merge drivers."""
623 Meant for use by custom merge drivers."""
624 self._results[f] = 0, 'r'
624 self._results[f] = 0, 'r'
625
625
626 def queueadd(self, f):
626 def queueadd(self, f):
627 """queues a file to be added to the dirstate
627 """queues a file to be added to the dirstate
628
628
629 Meant for use by custom merge drivers."""
629 Meant for use by custom merge drivers."""
630 self._results[f] = 0, 'a'
630 self._results[f] = 0, 'a'
631
631
632 def queueget(self, f):
632 def queueget(self, f):
633 """queues a file to be marked modified in the dirstate
633 """queues a file to be marked modified in the dirstate
634
634
635 Meant for use by custom merge drivers."""
635 Meant for use by custom merge drivers."""
636 self._results[f] = 0, 'g'
636 self._results[f] = 0, 'g'
637
637
638 def _getcheckunknownconfig(repo, section, name):
638 def _getcheckunknownconfig(repo, section, name):
639 config = repo.ui.config(section, name)
639 config = repo.ui.config(section, name)
640 valid = ['abort', 'ignore', 'warn']
640 valid = ['abort', 'ignore', 'warn']
641 if config not in valid:
641 if config not in valid:
642 validstr = ', '.join(["'" + v + "'" for v in valid])
642 validstr = ', '.join(["'" + v + "'" for v in valid])
643 raise error.ConfigError(_("%s.%s not valid "
643 raise error.ConfigError(_("%s.%s not valid "
644 "('%s' is none of %s)")
644 "('%s' is none of %s)")
645 % (section, name, config, validstr))
645 % (section, name, config, validstr))
646 return config
646 return config
647
647
648 def _checkunknownfile(repo, wctx, mctx, f, f2=None):
648 def _checkunknownfile(repo, wctx, mctx, f, f2=None):
649 if f2 is None:
649 if f2 is None:
650 f2 = f
650 f2 = f
651 return (repo.wvfs.audit.check(f)
651 return (repo.wvfs.audit.check(f)
652 and repo.wvfs.isfileorlink(f)
652 and repo.wvfs.isfileorlink(f)
653 and repo.dirstate.normalize(f) not in repo.dirstate
653 and repo.dirstate.normalize(f) not in repo.dirstate
654 and mctx[f2].cmp(wctx[f]))
654 and mctx[f2].cmp(wctx[f]))
655
655
656 def _checkunknowndirs(repo, f):
656 def _checkunknowndirs(repo, f):
657 """
657 """
658 Look for any unknown files or directories that may have a path conflict
658 Look for any unknown files or directories that may have a path conflict
659 with a file. If any path prefix of the file exists as a file or link,
659 with a file. If any path prefix of the file exists as a file or link,
660 then it conflicts. If the file itself is a directory that contains any
660 then it conflicts. If the file itself is a directory that contains any
661 file that is not tracked, then it conflicts.
661 file that is not tracked, then it conflicts.
662
662
663 Returns the shortest path at which a conflict occurs, or None if there is
663 Returns the shortest path at which a conflict occurs, or None if there is
664 no conflict.
664 no conflict.
665 """
665 """
666
666
667 # Check for path prefixes that exist as unknown files.
667 # Check for path prefixes that exist as unknown files.
668 for p in reversed(list(util.finddirs(f))):
668 for p in reversed(list(util.finddirs(f))):
669 if (repo.wvfs.audit.check(p)
669 if (repo.wvfs.audit.check(p)
670 and repo.wvfs.isfileorlink(p)
670 and repo.wvfs.isfileorlink(p)
671 and repo.dirstate.normalize(p) not in repo.dirstate):
671 and repo.dirstate.normalize(p) not in repo.dirstate):
672 return p
672 return p
673
673
674 # Check if the file conflicts with a directory containing unknown files.
674 # Check if the file conflicts with a directory containing unknown files.
675 if repo.wvfs.audit.check(f) and repo.wvfs.isdir(f):
675 if repo.wvfs.audit.check(f) and repo.wvfs.isdir(f):
676 # Does the directory contain any files that are not in the dirstate?
676 # Does the directory contain any files that are not in the dirstate?
677 for p, dirs, files in repo.wvfs.walk(f):
677 for p, dirs, files in repo.wvfs.walk(f):
678 for fn in files:
678 for fn in files:
679 relf = repo.dirstate.normalize(repo.wvfs.reljoin(p, fn))
679 relf = repo.dirstate.normalize(repo.wvfs.reljoin(p, fn))
680 if relf not in repo.dirstate:
680 if relf not in repo.dirstate:
681 return f
681 return f
682 return None
682 return None
683
683
684 def _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce):
684 def _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce):
685 """
685 """
686 Considers any actions that care about the presence of conflicting unknown
686 Considers any actions that care about the presence of conflicting unknown
687 files. For some actions, the result is to abort; for others, it is to
687 files. For some actions, the result is to abort; for others, it is to
688 choose a different action.
688 choose a different action.
689 """
689 """
690 fileconflicts = set()
690 fileconflicts = set()
691 pathconflicts = set()
691 pathconflicts = set()
692 warnconflicts = set()
692 warnconflicts = set()
693 abortconflicts = set()
693 abortconflicts = set()
694 unknownconfig = _getcheckunknownconfig(repo, 'merge', 'checkunknown')
694 unknownconfig = _getcheckunknownconfig(repo, 'merge', 'checkunknown')
695 ignoredconfig = _getcheckunknownconfig(repo, 'merge', 'checkignored')
695 ignoredconfig = _getcheckunknownconfig(repo, 'merge', 'checkignored')
696 pathconfig = repo.ui.configbool('experimental', 'merge.checkpathconflicts')
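# (editor's note) this new option gates the path-conflict detection below:
# when experimental.merge.checkpathconflicts is set to false, unknown files or
# directories that clash only as paths are no longer collected as conflicts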
696 if not force:
697 if not force:
697 def collectconflicts(conflicts, config):
698 def collectconflicts(conflicts, config):
698 if config == 'abort':
699 if config == 'abort':
699 abortconflicts.update(conflicts)
700 abortconflicts.update(conflicts)
700 elif config == 'warn':
701 elif config == 'warn':
701 warnconflicts.update(conflicts)
702 warnconflicts.update(conflicts)
702
703
703 for f, (m, args, msg) in actions.iteritems():
704 for f, (m, args, msg) in actions.iteritems():
704 if m in ('c', 'dc'):
705 if m in ('c', 'dc'):
705 if _checkunknownfile(repo, wctx, mctx, f):
706 if _checkunknownfile(repo, wctx, mctx, f):
706 fileconflicts.add(f)
707 fileconflicts.add(f)
707 elif f not in wctx:
708 elif pathconfig and f not in wctx:
708 path = _checkunknowndirs(repo, f)
709 path = _checkunknowndirs(repo, f)
709 if path is not None:
710 if path is not None:
710 pathconflicts.add(path)
711 pathconflicts.add(path)
711 elif m == 'dg':
712 elif m == 'dg':
712 if _checkunknownfile(repo, wctx, mctx, f, args[0]):
713 if _checkunknownfile(repo, wctx, mctx, f, args[0]):
713 fileconflicts.add(f)
714 fileconflicts.add(f)
714
715
715 allconflicts = fileconflicts | pathconflicts
716 allconflicts = fileconflicts | pathconflicts
716 ignoredconflicts = set([c for c in allconflicts
717 ignoredconflicts = set([c for c in allconflicts
717 if repo.dirstate._ignore(c)])
718 if repo.dirstate._ignore(c)])
718 unknownconflicts = allconflicts - ignoredconflicts
719 unknownconflicts = allconflicts - ignoredconflicts
719 collectconflicts(ignoredconflicts, ignoredconfig)
720 collectconflicts(ignoredconflicts, ignoredconfig)
720 collectconflicts(unknownconflicts, unknownconfig)
721 collectconflicts(unknownconflicts, unknownconfig)
721 else:
722 else:
722 for f, (m, args, msg) in actions.iteritems():
723 for f, (m, args, msg) in actions.iteritems():
723 if m == 'cm':
724 if m == 'cm':
724 fl2, anc = args
725 fl2, anc = args
725 different = _checkunknownfile(repo, wctx, mctx, f)
726 different = _checkunknownfile(repo, wctx, mctx, f)
726 if repo.dirstate._ignore(f):
727 if repo.dirstate._ignore(f):
727 config = ignoredconfig
728 config = ignoredconfig
728 else:
729 else:
729 config = unknownconfig
730 config = unknownconfig
730
731
731 # The behavior when force is True is described by this table:
732 # The behavior when force is True is described by this table:
732 # config different mergeforce | action backup
733 # config different mergeforce | action backup
733 # * n * | get n
734 # * n * | get n
734 # * y y | merge -
735 # * y y | merge -
735 # abort y n | merge - (1)
736 # abort y n | merge - (1)
736 # warn y n | warn + get y
737 # warn y n | warn + get y
737 # ignore y n | get y
738 # ignore y n | get y
738 #
739 #
739 # (1) this is probably the wrong behavior here -- we should
740 # (1) this is probably the wrong behavior here -- we should
740 # probably abort, but some actions like rebases currently
741 # probably abort, but some actions like rebases currently
741 # don't like an abort happening in the middle of
742 # don't like an abort happening in the middle of
742 # merge.update.
743 # merge.update.
743 if not different:
744 if not different:
744 actions[f] = ('g', (fl2, False), "remote created")
745 actions[f] = ('g', (fl2, False), "remote created")
745 elif mergeforce or config == 'abort':
746 elif mergeforce or config == 'abort':
746 actions[f] = ('m', (f, f, None, False, anc),
747 actions[f] = ('m', (f, f, None, False, anc),
747 "remote differs from untracked local")
748 "remote differs from untracked local")
748 elif config == 'abort':
749 elif config == 'abort':
749 abortconflicts.add(f)
750 abortconflicts.add(f)
750 else:
751 else:
751 if config == 'warn':
752 if config == 'warn':
752 warnconflicts.add(f)
753 warnconflicts.add(f)
753 actions[f] = ('g', (fl2, True), "remote created")
754 actions[f] = ('g', (fl2, True), "remote created")
754
755
755 for f in sorted(abortconflicts):
756 for f in sorted(abortconflicts):
756 warn = repo.ui.warn
757 warn = repo.ui.warn
757 if f in pathconflicts:
758 if f in pathconflicts:
758 if repo.wvfs.isfileorlink(f):
759 if repo.wvfs.isfileorlink(f):
759 warn(_("%s: untracked file conflicts with directory\n") % f)
760 warn(_("%s: untracked file conflicts with directory\n") % f)
760 else:
761 else:
761 warn(_("%s: untracked directory conflicts with file\n") % f)
762 warn(_("%s: untracked directory conflicts with file\n") % f)
762 else:
763 else:
763 warn(_("%s: untracked file differs\n") % f)
764 warn(_("%s: untracked file differs\n") % f)
764 if abortconflicts:
765 if abortconflicts:
765 raise error.Abort(_("untracked files in working directory "
766 raise error.Abort(_("untracked files in working directory "
766 "differ from files in requested revision"))
767 "differ from files in requested revision"))
767
768
768 for f in sorted(warnconflicts):
769 for f in sorted(warnconflicts):
769 if repo.wvfs.isfileorlink(f):
770 if repo.wvfs.isfileorlink(f):
770 repo.ui.warn(_("%s: replacing untracked file\n") % f)
771 repo.ui.warn(_("%s: replacing untracked file\n") % f)
771 else:
772 else:
772 repo.ui.warn(_("%s: replacing untracked files in directory\n") % f)
773 repo.ui.warn(_("%s: replacing untracked files in directory\n") % f)
773
774
774 for f, (m, args, msg) in actions.iteritems():
775 for f, (m, args, msg) in actions.iteritems():
775 if m == 'c':
776 if m == 'c':
776 backup = (f in fileconflicts or f in pathconflicts or
777 backup = (f in fileconflicts or f in pathconflicts or
777 any(p in pathconflicts for p in util.finddirs(f)))
778 any(p in pathconflicts for p in util.finddirs(f)))
778 flags, = args
779 flags, = args
779 actions[f] = ('g', (flags, backup), msg)
780 actions[f] = ('g', (flags, backup), msg)
780
781
781 def _forgetremoved(wctx, mctx, branchmerge):
782 def _forgetremoved(wctx, mctx, branchmerge):
782 """
783 """
783 Forget removed files
784 Forget removed files
784
785
785 If we're jumping between revisions (as opposed to merging), and if
786 If we're jumping between revisions (as opposed to merging), and if
786 neither the working directory nor the target rev has the file,
787 neither the working directory nor the target rev has the file,
787 then we need to remove it from the dirstate, to prevent the
788 then we need to remove it from the dirstate, to prevent the
788 dirstate from listing the file when it is no longer in the
789 dirstate from listing the file when it is no longer in the
789 manifest.
790 manifest.
790
791
791 If we're merging, and the other revision has removed a file
792 If we're merging, and the other revision has removed a file
792 that is not present in the working directory, we need to mark it
793 that is not present in the working directory, we need to mark it
793 as removed.
794 as removed.
794 """
795 """
795
796
796 actions = {}
797 actions = {}
797 m = 'f'
798 m = 'f'
798 if branchmerge:
799 if branchmerge:
799 m = 'r'
800 m = 'r'
800 for f in wctx.deleted():
801 for f in wctx.deleted():
801 if f not in mctx:
802 if f not in mctx:
802 actions[f] = m, None, "forget deleted"
803 actions[f] = m, None, "forget deleted"
803
804
804 if not branchmerge:
805 if not branchmerge:
805 for f in wctx.removed():
806 for f in wctx.removed():
806 if f not in mctx:
807 if f not in mctx:
807 actions[f] = 'f', None, "forget removed"
808 actions[f] = 'f', None, "forget removed"
808
809
809 return actions
810 return actions
810
811
811 def _checkcollision(repo, wmf, actions):
812 def _checkcollision(repo, wmf, actions):
812 # build up the provisional merged manifest
813 # build up the provisional merged manifest
813 pmmf = set(wmf)
814 pmmf = set(wmf)
814
815
815 if actions:
816 if actions:
816 # k, dr, e and rd are no-op
817 # k, dr, e and rd are no-op
817 for m in 'a', 'am', 'f', 'g', 'cd', 'dc':
818 for m in 'a', 'am', 'f', 'g', 'cd', 'dc':
818 for f, args, msg in actions[m]:
819 for f, args, msg in actions[m]:
819 pmmf.add(f)
820 pmmf.add(f)
820 for f, args, msg in actions['r']:
821 for f, args, msg in actions['r']:
821 pmmf.discard(f)
822 pmmf.discard(f)
822 for f, args, msg in actions['dm']:
823 for f, args, msg in actions['dm']:
823 f2, flags = args
824 f2, flags = args
824 pmmf.discard(f2)
825 pmmf.discard(f2)
825 pmmf.add(f)
826 pmmf.add(f)
826 for f, args, msg in actions['dg']:
827 for f, args, msg in actions['dg']:
827 pmmf.add(f)
828 pmmf.add(f)
828 for f, args, msg in actions['m']:
829 for f, args, msg in actions['m']:
829 f1, f2, fa, move, anc = args
830 f1, f2, fa, move, anc = args
830 if move:
831 if move:
831 pmmf.discard(f1)
832 pmmf.discard(f1)
832 pmmf.add(f)
833 pmmf.add(f)
833
834
834 # check case-folding collision in provisional merged manifest
835 # check case-folding collision in provisional merged manifest
835 foldmap = {}
836 foldmap = {}
836 for f in pmmf:
837 for f in pmmf:
837 fold = util.normcase(f)
838 fold = util.normcase(f)
838 if fold in foldmap:
839 if fold in foldmap:
839 raise error.Abort(_("case-folding collision between %s and %s")
840 raise error.Abort(_("case-folding collision between %s and %s")
840 % (f, foldmap[fold]))
841 % (f, foldmap[fold]))
841 foldmap[fold] = f
842 foldmap[fold] = f
842
843
843 # check case-folding of directories
844 # check case-folding of directories
844 foldprefix = unfoldprefix = lastfull = ''
845 foldprefix = unfoldprefix = lastfull = ''
845 for fold, f in sorted(foldmap.items()):
846 for fold, f in sorted(foldmap.items()):
846 if fold.startswith(foldprefix) and not f.startswith(unfoldprefix):
847 if fold.startswith(foldprefix) and not f.startswith(unfoldprefix):
847 # the folded prefix matches but actual casing is different
848 # the folded prefix matches but actual casing is different
848 raise error.Abort(_("case-folding collision between "
849 raise error.Abort(_("case-folding collision between "
849 "%s and directory of %s") % (lastfull, f))
850 "%s and directory of %s") % (lastfull, f))
850 foldprefix = fold + '/'
851 foldprefix = fold + '/'
851 unfoldprefix = f + '/'
852 unfoldprefix = f + '/'
852 lastfull = f
853 lastfull = f
853
854
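# (editor's illustration) on platforms where util.normcase folds case, a merge
# that would create both 'README' and 'ReadMe' maps them to the same foldmap
# key, and _checkcollision() above aborts with a case-folding collision error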
854 def driverpreprocess(repo, ms, wctx, labels=None):
855 def driverpreprocess(repo, ms, wctx, labels=None):
855 """run the preprocess step of the merge driver, if any
856 """run the preprocess step of the merge driver, if any
856
857
857 This is currently not implemented -- it's an extension point."""
858 This is currently not implemented -- it's an extension point."""
858 return True
859 return True
859
860
860 def driverconclude(repo, ms, wctx, labels=None):
861 def driverconclude(repo, ms, wctx, labels=None):
861 """run the conclude step of the merge driver, if any
862 """run the conclude step of the merge driver, if any
862
863
863 This is currently not implemented -- it's an extension point."""
864 This is currently not implemented -- it's an extension point."""
864 return True
865 return True
865
866
866 def _filesindirs(repo, manifest, dirs):
867 def _filesindirs(repo, manifest, dirs):
867 """
868 """
868 Generator that yields (file, directory) pairs for all the files in the
869 Generator that yields (file, directory) pairs for all the files in the
869 manifest that are found inside one of the directories listed in dirs,
870 manifest that are found inside one of the directories listed in dirs,
870 paired with the directory they were found in.
871 paired with the directory they were found in.
871 """
872 """
872 for f in manifest:
873 for f in manifest:
873 for p in util.finddirs(f):
874 for p in util.finddirs(f):
874 if p in dirs:
875 if p in dirs:
875 yield f, p
876 yield f, p
876 break
877 break
877
878
878 def checkpathconflicts(repo, wctx, mctx, actions):
879 def checkpathconflicts(repo, wctx, mctx, actions):
879 """
880 """
880 Check if any actions introduce path conflicts in the repository, updating
881 Check if any actions introduce path conflicts in the repository, updating
881 actions to record or handle the path conflict accordingly.
882 actions to record or handle the path conflict accordingly.
882 """
883 """
883 mf = wctx.manifest()
884 mf = wctx.manifest()
884
885
885 # The set of local files that conflict with a remote directory.
886 # The set of local files that conflict with a remote directory.
886 localconflicts = set()
887 localconflicts = set()
887
888
888 # The set of directories that conflict with a remote file, and so may cause
889 # The set of directories that conflict with a remote file, and so may cause
889 # conflicts if they still contain any files after the merge.
890 # conflicts if they still contain any files after the merge.
890 remoteconflicts = set()
891 remoteconflicts = set()
891
892
892 # The set of directories that appear as both a file and a directory in the
893 # The set of directories that appear as both a file and a directory in the
893 # remote manifest. These indicate an invalid remote manifest, which
894 # remote manifest. These indicate an invalid remote manifest, which
894 # can't be updated to cleanly.
895 # can't be updated to cleanly.
895 invalidconflicts = set()
896 invalidconflicts = set()
896
897
897 # The set of files deleted by all the actions.
898 # The set of files deleted by all the actions.
898 deletedfiles = set()
899 deletedfiles = set()
899
900
900 for f, (m, args, msg) in actions.items():
901 for f, (m, args, msg) in actions.items():
901 if m in ('c', 'dc', 'm', 'cm'):
902 if m in ('c', 'dc', 'm', 'cm'):
902 # This action may create a new local file.
903 # This action may create a new local file.
903 if mf.hasdir(f):
904 if mf.hasdir(f):
904 # The file aliases a local directory. This might be ok if all
905 # The file aliases a local directory. This might be ok if all
905 # the files in the local directory are being deleted. This
906 # the files in the local directory are being deleted. This
906 # will be checked once we know what all the deleted files are.
907 # will be checked once we know what all the deleted files are.
907 remoteconflicts.add(f)
908 remoteconflicts.add(f)
908 for p in util.finddirs(f):
909 for p in util.finddirs(f):
909 if p in mf:
910 if p in mf:
910 if p in mctx:
911 if p in mctx:
911 # The file is in a directory which aliases both a local
912 # The file is in a directory which aliases both a local
912 # and a remote file. This is an internal inconsistency
913 # and a remote file. This is an internal inconsistency
913 # within the remote manifest.
914 # within the remote manifest.
914 invalidconflicts.add(p)
915 invalidconflicts.add(p)
915 else:
916 else:
916 # The file is in a directory which aliases a local file.
917 # The file is in a directory which aliases a local file.
917 # We will need to rename the local file.
918 # We will need to rename the local file.
918 localconflicts.add(p)
919 localconflicts.add(p)
919 if p in actions and actions[p][0] in ('c', 'dc', 'm', 'cm'):
920 if p in actions and actions[p][0] in ('c', 'dc', 'm', 'cm'):
920 # The file is in a directory which aliases a remote file.
921 # The file is in a directory which aliases a remote file.
921 # This is an internal inconsistency within the remote
922 # This is an internal inconsistency within the remote
922 # manifest.
923 # manifest.
923 invalidconflicts.add(p)
924 invalidconflicts.add(p)
924
925
925 # Track the names of all deleted files.
926 # Track the names of all deleted files.
926 if m == 'r':
927 if m == 'r':
927 deletedfiles.add(f)
928 deletedfiles.add(f)
928 if m == 'm':
929 if m == 'm':
929 f1, f2, fa, move, anc = args
930 f1, f2, fa, move, anc = args
930 if move:
931 if move:
931 deletedfiles.add(f1)
932 deletedfiles.add(f1)
932 if m == 'dm':
933 if m == 'dm':
933 f2, flags = args
934 f2, flags = args
934 deletedfiles.add(f2)
935 deletedfiles.add(f2)
935
936
936 # Rename all local conflicting files that have not been deleted.
937 # Rename all local conflicting files that have not been deleted.
937 for p in localconflicts:
938 for p in localconflicts:
938 if p not in deletedfiles:
939 if p not in deletedfiles:
939 ctxname = str(wctx).rstrip('+')
940 ctxname = str(wctx).rstrip('+')
940 pnew = util.safename(p, ctxname, wctx, set(actions.keys()))
941 pnew = util.safename(p, ctxname, wctx, set(actions.keys()))
941 actions[pnew] = ('pr', (p,), "local path conflict")
942 actions[pnew] = ('pr', (p,), "local path conflict")
942 actions[p] = ('p', (pnew, 'l'), "path conflict")
943 actions[p] = ('p', (pnew, 'l'), "path conflict")
943
944
944 if remoteconflicts:
945 if remoteconflicts:
945 # Check if all files in the conflicting directories have been removed.
946 # Check if all files in the conflicting directories have been removed.
946 ctxname = str(mctx).rstrip('+')
947 ctxname = str(mctx).rstrip('+')
947 for f, p in _filesindirs(repo, mf, remoteconflicts):
948 for f, p in _filesindirs(repo, mf, remoteconflicts):
948 if f not in deletedfiles:
949 if f not in deletedfiles:
949 m, args, msg = actions[p]
950 m, args, msg = actions[p]
950 pnew = util.safename(p, ctxname, wctx, set(actions.keys()))
951 pnew = util.safename(p, ctxname, wctx, set(actions.keys()))
951 if m in ('dc', 'm'):
952 if m in ('dc', 'm'):
952 # Action was merge, just update target.
953 # Action was merge, just update target.
953 actions[pnew] = (m, args, msg)
954 actions[pnew] = (m, args, msg)
954 else:
955 else:
955 # Action was create, change to renamed get action.
956 # Action was create, change to renamed get action.
956 fl = args[0]
957 fl = args[0]
957 actions[pnew] = ('dg', (p, fl), "remote path conflict")
958 actions[pnew] = ('dg', (p, fl), "remote path conflict")
958 actions[p] = ('p', (pnew, 'r'), "path conflict")
959 actions[p] = ('p', (pnew, 'r'), "path conflict")
959 remoteconflicts.remove(p)
960 remoteconflicts.remove(p)
960 break
961 break
961
962
962 if invalidconflicts:
963 if invalidconflicts:
963 for p in invalidconflicts:
964 for p in invalidconflicts:
964 repo.ui.warn(_("%s: is both a file and a directory\n") % p)
965 repo.ui.warn(_("%s: is both a file and a directory\n") % p)
965 raise error.Abort(_("destination manifest contains path conflicts"))
966 raise error.Abort(_("destination manifest contains path conflicts"))
966
967
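As an illustration of the simplest case checkpathconflicts handles, the sketch below (plain sets and a simplified stand-in for util.finddirs; the names are hypothetical and this is not the module's action machinery) shows how a file created by the remote side conflicts with an existing local file whose name matches one of its parent directories:

    def finddirs(path):
        # Yield each ancestor directory of path, deepest first.
        while '/' in path:
            path = path.rsplit('/', 1)[0]
            yield path

    def local_path_conflicts(localfiles, remotecreates, deleted=()):
        conflicts = set()
        for f in remotecreates:
            for p in finddirs(f):
                if p in localfiles and p not in deleted:
                    # Local file p aliases a directory needed for f and is not
                    # going away, so it would have to be renamed aside.
                    conflicts.add(p)
        return conflicts

    # local_path_conflicts({'a', 'x'}, {'a/b'}) -> {'a'}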
967 def manifestmerge(repo, wctx, p2, pa, branchmerge, force, matcher,
968 def manifestmerge(repo, wctx, p2, pa, branchmerge, force, matcher,
968 acceptremote, followcopies, forcefulldiff=False):
969 acceptremote, followcopies, forcefulldiff=False):
969 """
970 """
970 Merge wctx and p2 with ancestor pa and generate merge action list
971 Merge wctx and p2 with ancestor pa and generate merge action list
971
972
972 branchmerge and force are as passed in to update
973 branchmerge and force are as passed in to update
973 matcher = matcher to filter file lists
974 matcher = matcher to filter file lists
974 acceptremote = accept the incoming changes without prompting
975 acceptremote = accept the incoming changes without prompting
975 """
976 """
976 if matcher is not None and matcher.always():
977 if matcher is not None and matcher.always():
977 matcher = None
978 matcher = None
978
979
979 copy, movewithdir, diverge, renamedelete, dirmove = {}, {}, {}, {}, {}
980 copy, movewithdir, diverge, renamedelete, dirmove = {}, {}, {}, {}, {}
980
981
981 # manifests fetched in order are going to be faster, so prime the caches
982 # manifests fetched in order are going to be faster, so prime the caches
982 [x.manifest() for x in
983 [x.manifest() for x in
983 sorted(wctx.parents() + [p2, pa], key=scmutil.intrev)]
984 sorted(wctx.parents() + [p2, pa], key=scmutil.intrev)]
984
985
985 if followcopies:
986 if followcopies:
986 ret = copies.mergecopies(repo, wctx, p2, pa)
987 ret = copies.mergecopies(repo, wctx, p2, pa)
987 copy, movewithdir, diverge, renamedelete, dirmove = ret
988 copy, movewithdir, diverge, renamedelete, dirmove = ret
988
989
989 boolbm = pycompat.bytestr(bool(branchmerge))
990 boolbm = pycompat.bytestr(bool(branchmerge))
990 boolf = pycompat.bytestr(bool(force))
991 boolf = pycompat.bytestr(bool(force))
991 boolm = pycompat.bytestr(bool(matcher))
992 boolm = pycompat.bytestr(bool(matcher))
992 repo.ui.note(_("resolving manifests\n"))
993 repo.ui.note(_("resolving manifests\n"))
993 repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n"
994 repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n"
994 % (boolbm, boolf, boolm))
995 % (boolbm, boolf, boolm))
995 repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
996 repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
996
997
997 m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
998 m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
998 copied = set(copy.values())
999 copied = set(copy.values())
999 copied.update(movewithdir.values())
1000 copied.update(movewithdir.values())
1000
1001
1001 if '.hgsubstate' in m1:
1002 if '.hgsubstate' in m1:
1002 # check whether sub state is modified
1003 # check whether sub state is modified
1003 if any(wctx.sub(s).dirty() for s in wctx.substate):
1004 if any(wctx.sub(s).dirty() for s in wctx.substate):
1004 m1['.hgsubstate'] = modifiednodeid
1005 m1['.hgsubstate'] = modifiednodeid
1005
1006
1006 # Don't use m2-vs-ma optimization if:
1007 # Don't use m2-vs-ma optimization if:
1007 # - ma is the same as m1 or m2, which we're just going to diff again later
1008 # - ma is the same as m1 or m2, which we're just going to diff again later
1008 # - The caller specifically asks for a full diff, which is useful during bid
1009 # - The caller specifically asks for a full diff, which is useful during bid
1009 # merge.
1010 # merge.
1010 if (pa not in ([wctx, p2] + wctx.parents()) and not forcefulldiff):
1011 if (pa not in ([wctx, p2] + wctx.parents()) and not forcefulldiff):
1011 # Identify which files are relevant to the merge, so we can limit the
1012 # Identify which files are relevant to the merge, so we can limit the
1012 # total m1-vs-m2 diff to just those files. This has significant
1013 # total m1-vs-m2 diff to just those files. This has significant
1013 # performance benefits in large repositories.
1014 # performance benefits in large repositories.
1014 relevantfiles = set(ma.diff(m2).keys())
1015 relevantfiles = set(ma.diff(m2).keys())
1015
1016
1016 # For copied and moved files, we need to add the source file too.
1017 # For copied and moved files, we need to add the source file too.
1017 for copykey, copyvalue in copy.iteritems():
1018 for copykey, copyvalue in copy.iteritems():
1018 if copyvalue in relevantfiles:
1019 if copyvalue in relevantfiles:
1019 relevantfiles.add(copykey)
1020 relevantfiles.add(copykey)
1020 for movedirkey in movewithdir:
1021 for movedirkey in movewithdir:
1021 relevantfiles.add(movedirkey)
1022 relevantfiles.add(movedirkey)
1022 filesmatcher = scmutil.matchfiles(repo, relevantfiles)
1023 filesmatcher = scmutil.matchfiles(repo, relevantfiles)
1023 matcher = matchmod.intersectmatchers(matcher, filesmatcher)
1024 matcher = matchmod.intersectmatchers(matcher, filesmatcher)
1024
1025
1025 diff = m1.diff(m2, match=matcher)
1026 diff = m1.diff(m2, match=matcher)
1026
1027
1027 if matcher is None:
1028 if matcher is None:
1028 matcher = matchmod.always('', '')
1029 matcher = matchmod.always('', '')
1029
1030
1030 actions = {}
1031 actions = {}
1031 for f, ((n1, fl1), (n2, fl2)) in diff.iteritems():
1032 for f, ((n1, fl1), (n2, fl2)) in diff.iteritems():
1032 if n1 and n2: # file exists on both local and remote side
1033 if n1 and n2: # file exists on both local and remote side
1033 if f not in ma:
1034 if f not in ma:
1034 fa = copy.get(f, None)
1035 fa = copy.get(f, None)
1035 if fa is not None:
1036 if fa is not None:
1036 actions[f] = ('m', (f, f, fa, False, pa.node()),
1037 actions[f] = ('m', (f, f, fa, False, pa.node()),
1037 "both renamed from " + fa)
1038 "both renamed from " + fa)
1038 else:
1039 else:
1039 actions[f] = ('m', (f, f, None, False, pa.node()),
1040 actions[f] = ('m', (f, f, None, False, pa.node()),
1040 "both created")
1041 "both created")
1041 else:
1042 else:
1042 a = ma[f]
1043 a = ma[f]
1043 fla = ma.flags(f)
1044 fla = ma.flags(f)
1044 nol = 'l' not in fl1 + fl2 + fla
1045 nol = 'l' not in fl1 + fl2 + fla
1045 if n2 == a and fl2 == fla:
1046 if n2 == a and fl2 == fla:
1046 actions[f] = ('k', (), "remote unchanged")
1047 actions[f] = ('k', (), "remote unchanged")
1047 elif n1 == a and fl1 == fla: # local unchanged - use remote
1048 elif n1 == a and fl1 == fla: # local unchanged - use remote
1048 if n1 == n2: # optimization: keep local content
1049 if n1 == n2: # optimization: keep local content
1049 actions[f] = ('e', (fl2,), "update permissions")
1050 actions[f] = ('e', (fl2,), "update permissions")
1050 else:
1051 else:
1051 actions[f] = ('g', (fl2, False), "remote is newer")
1052 actions[f] = ('g', (fl2, False), "remote is newer")
1052 elif nol and n2 == a: # remote only changed 'x'
1053 elif nol and n2 == a: # remote only changed 'x'
1053 actions[f] = ('e', (fl2,), "update permissions")
1054 actions[f] = ('e', (fl2,), "update permissions")
1054 elif nol and n1 == a: # local only changed 'x'
1055 elif nol and n1 == a: # local only changed 'x'
1055 actions[f] = ('g', (fl1, False), "remote is newer")
1056 actions[f] = ('g', (fl1, False), "remote is newer")
1056 else: # both changed something
1057 else: # both changed something
1057 actions[f] = ('m', (f, f, f, False, pa.node()),
1058 actions[f] = ('m', (f, f, f, False, pa.node()),
1058 "versions differ")
1059 "versions differ")
1059 elif n1: # file exists only on local side
1060 elif n1: # file exists only on local side
1060 if f in copied:
1061 if f in copied:
1061 pass # we'll deal with it on m2 side
1062 pass # we'll deal with it on m2 side
1062 elif f in movewithdir: # directory rename, move local
1063 elif f in movewithdir: # directory rename, move local
1063 f2 = movewithdir[f]
1064 f2 = movewithdir[f]
1064 if f2 in m2:
1065 if f2 in m2:
1065 actions[f2] = ('m', (f, f2, None, True, pa.node()),
1066 actions[f2] = ('m', (f, f2, None, True, pa.node()),
1066 "remote directory rename, both created")
1067 "remote directory rename, both created")
1067 else:
1068 else:
1068 actions[f2] = ('dm', (f, fl1),
1069 actions[f2] = ('dm', (f, fl1),
1069 "remote directory rename - move from " + f)
1070 "remote directory rename - move from " + f)
1070 elif f in copy:
1071 elif f in copy:
1071 f2 = copy[f]
1072 f2 = copy[f]
1072 actions[f] = ('m', (f, f2, f2, False, pa.node()),
1073 actions[f] = ('m', (f, f2, f2, False, pa.node()),
1073 "local copied/moved from " + f2)
1074 "local copied/moved from " + f2)
1074 elif f in ma: # clean, a different, no remote
1075 elif f in ma: # clean, a different, no remote
1075 if n1 != ma[f]:
1076 if n1 != ma[f]:
1076 if acceptremote:
1077 if acceptremote:
1077 actions[f] = ('r', None, "remote delete")
1078 actions[f] = ('r', None, "remote delete")
1078 else:
1079 else:
1079 actions[f] = ('cd', (f, None, f, False, pa.node()),
1080 actions[f] = ('cd', (f, None, f, False, pa.node()),
1080 "prompt changed/deleted")
1081 "prompt changed/deleted")
1081 elif n1 == addednodeid:
1082 elif n1 == addednodeid:
1082 # This extra 'a' is added by working copy manifest to mark
1082 # This extra 'a' is added by the working copy manifest to mark
1083 # This extra 'a' is added by the working copy manifest to mark
1084 # the file as locally added. We should forget it instead of
1084 # deleting it.
1085 # deleting it.
1085 actions[f] = ('f', None, "remote deleted")
1086 actions[f] = ('f', None, "remote deleted")
1086 else:
1087 else:
1087 actions[f] = ('r', None, "other deleted")
1088 actions[f] = ('r', None, "other deleted")
1088 elif n2: # file exists only on remote side
1089 elif n2: # file exists only on remote side
1089 if f in copied:
1090 if f in copied:
1090 pass # we'll deal with it on m1 side
1091 pass # we'll deal with it on m1 side
1091 elif f in movewithdir:
1092 elif f in movewithdir:
1092 f2 = movewithdir[f]
1093 f2 = movewithdir[f]
1093 if f2 in m1:
1094 if f2 in m1:
1094 actions[f2] = ('m', (f2, f, None, False, pa.node()),
1095 actions[f2] = ('m', (f2, f, None, False, pa.node()),
1095 "local directory rename, both created")
1096 "local directory rename, both created")
1096 else:
1097 else:
1097 actions[f2] = ('dg', (f, fl2),
1098 actions[f2] = ('dg', (f, fl2),
1098 "local directory rename - get from " + f)
1099 "local directory rename - get from " + f)
1099 elif f in copy:
1100 elif f in copy:
1100 f2 = copy[f]
1101 f2 = copy[f]
1101 if f2 in m2:
1102 if f2 in m2:
1102 actions[f] = ('m', (f2, f, f2, False, pa.node()),
1103 actions[f] = ('m', (f2, f, f2, False, pa.node()),
1103 "remote copied from " + f2)
1104 "remote copied from " + f2)
1104 else:
1105 else:
1105 actions[f] = ('m', (f2, f, f2, True, pa.node()),
1106 actions[f] = ('m', (f2, f, f2, True, pa.node()),
1106 "remote moved from " + f2)
1107 "remote moved from " + f2)
1107 elif f not in ma:
1108 elif f not in ma:
1108 # local unknown, remote created: the logic is described by the
1109 # local unknown, remote created: the logic is described by the
1109 # following table:
1110 # following table:
1110 #
1111 #
1111 # force branchmerge different | action
1112 # force branchmerge different | action
1112 # n * * | create
1113 # n * * | create
1113 # y n * | create
1114 # y n * | create
1114 # y y n | create
1115 # y y n | create
1115 # y y y | merge
1116 # y y y | merge
1116 #
1117 #
1117 # Checking whether the files are different is expensive, so we
1118 # Checking whether the files are different is expensive, so we
1118 # don't do that when we can avoid it.
1119 # don't do that when we can avoid it.
1119 if not force:
1120 if not force:
1120 actions[f] = ('c', (fl2,), "remote created")
1121 actions[f] = ('c', (fl2,), "remote created")
1121 elif not branchmerge:
1122 elif not branchmerge:
1122 actions[f] = ('c', (fl2,), "remote created")
1123 actions[f] = ('c', (fl2,), "remote created")
1123 else:
1124 else:
1124 actions[f] = ('cm', (fl2, pa.node()),
1125 actions[f] = ('cm', (fl2, pa.node()),
1125 "remote created, get or merge")
1126 "remote created, get or merge")
1126 elif n2 != ma[f]:
1127 elif n2 != ma[f]:
1127 df = None
1128 df = None
1128 for d in dirmove:
1129 for d in dirmove:
1129 if f.startswith(d):
1130 if f.startswith(d):
1130 # new file added in a directory that was moved
1131 # new file added in a directory that was moved
1131 df = dirmove[d] + f[len(d):]
1132 df = dirmove[d] + f[len(d):]
1132 break
1133 break
1133 if df is not None and df in m1:
1134 if df is not None and df in m1:
1134 actions[df] = ('m', (df, f, f, False, pa.node()),
1135 actions[df] = ('m', (df, f, f, False, pa.node()),
1135 "local directory rename - respect move from " + f)
1136 "local directory rename - respect move from " + f)
1136 elif acceptremote:
1137 elif acceptremote:
1137 actions[f] = ('c', (fl2,), "remote recreating")
1138 actions[f] = ('c', (fl2,), "remote recreating")
1138 else:
1139 else:
1139 actions[f] = ('dc', (None, f, f, False, pa.node()),
1140 actions[f] = ('dc', (None, f, f, False, pa.node()),
1140 "prompt deleted/changed")
1141 "prompt deleted/changed")
1141
1142
1142 # If we are merging, look for path conflicts.
1143 checkpathconflicts(repo, wctx, p2, actions)
1143 if repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
1144     # If we are merging, look for path conflicts.
1145     checkpathconflicts(repo, wctx, p2, actions)
1144
1146
1145 return actions, diverge, renamedelete
1147 return actions, diverge, renamedelete
1146
1148
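Since this revision gates the checkpathconflicts call on ui.configbool('experimental', 'merge.checkpathconflicts'), the checking can be switched off from configuration; a sketch of the relevant hgrc stanza (assuming the option is left enabled by default):

    [experimental]
    merge.checkpathconflicts = False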
1147 def _resolvetrivial(repo, wctx, mctx, ancestor, actions):
1149 def _resolvetrivial(repo, wctx, mctx, ancestor, actions):
1148 """Resolves false conflicts where the nodeid changed but the content
1150 """Resolves false conflicts where the nodeid changed but the content
1149 remained the same."""
1151 remained the same."""
1150
1152
1151 for f, (m, args, msg) in actions.items():
1153 for f, (m, args, msg) in actions.items():
1152 if m == 'cd' and f in ancestor and not wctx[f].cmp(ancestor[f]):
1154 if m == 'cd' and f in ancestor and not wctx[f].cmp(ancestor[f]):
1153 # local did change but ended up with same content
1155 # local did change but ended up with same content
1154 actions[f] = 'r', None, "prompt same"
1156 actions[f] = 'r', None, "prompt same"
1155 elif m == 'dc' and f in ancestor and not mctx[f].cmp(ancestor[f]):
1157 elif m == 'dc' and f in ancestor and not mctx[f].cmp(ancestor[f]):
1156 # remote did change but ended up with same content
1158 # remote did change but ended up with same content
1157 del actions[f] # don't get = keep local deleted
1159 del actions[f] # don't get = keep local deleted
1158
1160
1159 def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force,
1161 def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force,
1160 acceptremote, followcopies, matcher=None,
1162 acceptremote, followcopies, matcher=None,
1161 mergeforce=False):
1163 mergeforce=False):
1162 """Calculate the actions needed to merge mctx into wctx using ancestors"""
1164 """Calculate the actions needed to merge mctx into wctx using ancestors"""
1163 # Avoid cycle.
1165 # Avoid cycle.
1164 from . import sparse
1166 from . import sparse
1165
1167
1166 if len(ancestors) == 1: # default
1168 if len(ancestors) == 1: # default
1167 actions, diverge, renamedelete = manifestmerge(
1169 actions, diverge, renamedelete = manifestmerge(
1168 repo, wctx, mctx, ancestors[0], branchmerge, force, matcher,
1170 repo, wctx, mctx, ancestors[0], branchmerge, force, matcher,
1169 acceptremote, followcopies)
1171 acceptremote, followcopies)
1170 _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce)
1172 _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce)
1171
1173
1172 else: # only when merge.preferancestor=* - the default
1174 else: # only when merge.preferancestor=* - the default
1173 repo.ui.note(
1175 repo.ui.note(
1174 _("note: merging %s and %s using bids from ancestors %s\n") %
1176 _("note: merging %s and %s using bids from ancestors %s\n") %
1175 (wctx, mctx, _(' and ').join(pycompat.bytestr(anc)
1177 (wctx, mctx, _(' and ').join(pycompat.bytestr(anc)
1176 for anc in ancestors)))
1178 for anc in ancestors)))
1177
1179
1178 # Call for bids
1180 # Call for bids
1179 fbids = {} # mapping filename to bids (action method to list of actions)
1181 fbids = {} # mapping filename to bids (action method to list of actions)
1180 diverge, renamedelete = None, None
1182 diverge, renamedelete = None, None
1181 for ancestor in ancestors:
1183 for ancestor in ancestors:
1182 repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
1184 repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
1183 actions, diverge1, renamedelete1 = manifestmerge(
1185 actions, diverge1, renamedelete1 = manifestmerge(
1184 repo, wctx, mctx, ancestor, branchmerge, force, matcher,
1186 repo, wctx, mctx, ancestor, branchmerge, force, matcher,
1185 acceptremote, followcopies, forcefulldiff=True)
1187 acceptremote, followcopies, forcefulldiff=True)
1186 _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce)
1188 _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce)
1187
1189
1188 # Track the shortest set of warnings on the theory that bid
1190 # Track the shortest set of warnings on the theory that bid
1189 # merge will correctly incorporate more information
1191 # merge will correctly incorporate more information
1190 if diverge is None or len(diverge1) < len(diverge):
1192 if diverge is None or len(diverge1) < len(diverge):
1191 diverge = diverge1
1193 diverge = diverge1
1192 if renamedelete is None or len(renamedelete) < len(renamedelete1):
1194 if renamedelete is None or len(renamedelete) < len(renamedelete1):
1193 renamedelete = renamedelete1
1195 renamedelete = renamedelete1
1194
1196
1195 for f, a in sorted(actions.iteritems()):
1197 for f, a in sorted(actions.iteritems()):
1196 m, args, msg = a
1198 m, args, msg = a
1197 repo.ui.debug(' %s: %s -> %s\n' % (f, msg, m))
1199 repo.ui.debug(' %s: %s -> %s\n' % (f, msg, m))
1198 if f in fbids:
1200 if f in fbids:
1199 d = fbids[f]
1201 d = fbids[f]
1200 if m in d:
1202 if m in d:
1201 d[m].append(a)
1203 d[m].append(a)
1202 else:
1204 else:
1203 d[m] = [a]
1205 d[m] = [a]
1204 else:
1206 else:
1205 fbids[f] = {m: [a]}
1207 fbids[f] = {m: [a]}
1206
1208
1207 # Pick the best bid for each file
1209 # Pick the best bid for each file
1208 repo.ui.note(_('\nauction for merging merge bids\n'))
1210 repo.ui.note(_('\nauction for merging merge bids\n'))
1209 actions = {}
1211 actions = {}
1210 dms = [] # filenames that have dm actions
1212 dms = [] # filenames that have dm actions
1211 for f, bids in sorted(fbids.items()):
1213 for f, bids in sorted(fbids.items()):
1212 # bids is a mapping from action method to list of actions
1214 # bids is a mapping from action method to list of actions
1213 # Consensus?
1215 # Consensus?
1214 if len(bids) == 1: # all bids are the same kind of method
1216 if len(bids) == 1: # all bids are the same kind of method
1215 m, l = list(bids.items())[0]
1217 m, l = list(bids.items())[0]
1216 if all(a == l[0] for a in l[1:]): # len(bids) is > 1
1218 if all(a == l[0] for a in l[1:]): # len(bids) is > 1
1217 repo.ui.note(_(" %s: consensus for %s\n") % (f, m))
1219 repo.ui.note(_(" %s: consensus for %s\n") % (f, m))
1218 actions[f] = l[0]
1220 actions[f] = l[0]
1219 if m == 'dm':
1221 if m == 'dm':
1220 dms.append(f)
1222 dms.append(f)
1221 continue
1223 continue
1222 # If keep is an option, just do it.
1224 # If keep is an option, just do it.
1223 if 'k' in bids:
1225 if 'k' in bids:
1224 repo.ui.note(_(" %s: picking 'keep' action\n") % f)
1226 repo.ui.note(_(" %s: picking 'keep' action\n") % f)
1225 actions[f] = bids['k'][0]
1227 actions[f] = bids['k'][0]
1226 continue
1228 continue
1227 # If there are gets and they all agree [how could they not?], do it.
1229 # If there are gets and they all agree [how could they not?], do it.
1228 if 'g' in bids:
1230 if 'g' in bids:
1229 ga0 = bids['g'][0]
1231 ga0 = bids['g'][0]
1230 if all(a == ga0 for a in bids['g'][1:]):
1232 if all(a == ga0 for a in bids['g'][1:]):
1231 repo.ui.note(_(" %s: picking 'get' action\n") % f)
1233 repo.ui.note(_(" %s: picking 'get' action\n") % f)
1232 actions[f] = ga0
1234 actions[f] = ga0
1233 continue
1235 continue
1234 # TODO: Consider other simple actions such as mode changes
1236 # TODO: Consider other simple actions such as mode changes
1235 # Handle inefficient democrazy.
1237 # Handle inefficient democrazy.
1236 repo.ui.note(_(' %s: multiple bids for merge action:\n') % f)
1238 repo.ui.note(_(' %s: multiple bids for merge action:\n') % f)
1237 for m, l in sorted(bids.items()):
1239 for m, l in sorted(bids.items()):
1238 for _f, args, msg in l:
1240 for _f, args, msg in l:
1239 repo.ui.note(' %s -> %s\n' % (msg, m))
1241 repo.ui.note(' %s -> %s\n' % (msg, m))
1240 # Pick random action. TODO: Instead, prompt user when resolving
1242 # Pick random action. TODO: Instead, prompt user when resolving
1241 m, l = list(bids.items())[0]
1243 m, l = list(bids.items())[0]
1242 repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') %
1244 repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') %
1243 (f, m))
1245 (f, m))
1244 actions[f] = l[0]
1246 actions[f] = l[0]
1245 if m == 'dm':
1247 if m == 'dm':
1246 dms.append(f)
1248 dms.append(f)
1247 continue
1249 continue
1248 # Work around 'dm' that can cause multiple actions for the same file
1250 # Work around 'dm' that can cause multiple actions for the same file
1249 for f in dms:
1251 for f in dms:
1250 dm, (f0, flags), msg = actions[f]
1252 dm, (f0, flags), msg = actions[f]
1251 assert dm == 'dm', dm
1253 assert dm == 'dm', dm
1252 if f0 in actions and actions[f0][0] == 'r':
1254 if f0 in actions and actions[f0][0] == 'r':
1253 # We have one bid for removing a file and another for moving it.
1255 # We have one bid for removing a file and another for moving it.
1254 # These two could be merged as first move and then delete ...
1256 # These two could be merged as first move and then delete ...
1255 # but instead drop moving and just delete.
1257 # but instead drop moving and just delete.
1256 del actions[f]
1258 del actions[f]
1257 repo.ui.note(_('end of auction\n\n'))
1259 repo.ui.note(_('end of auction\n\n'))
1258
1260
1259 _resolvetrivial(repo, wctx, mctx, ancestors[0], actions)
1261 _resolvetrivial(repo, wctx, mctx, ancestors[0], actions)
1260
1262
1261 if wctx.rev() is None:
1263 if wctx.rev() is None:
1262 fractions = _forgetremoved(wctx, mctx, branchmerge)
1264 fractions = _forgetremoved(wctx, mctx, branchmerge)
1263 actions.update(fractions)
1265 actions.update(fractions)
1264
1266
1265 prunedactions = sparse.filterupdatesactions(repo, wctx, mctx, branchmerge,
1267 prunedactions = sparse.filterupdatesactions(repo, wctx, mctx, branchmerge,
1266 actions)
1268 actions)
1267
1269
1268 return prunedactions, diverge, renamedelete
1270 return prunedactions, diverge, renamedelete
1269
1271
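The bid auction in calculateupdates resolves competing per-ancestor actions; the standalone sketch below (plain dicts and a hypothetical pick_bid helper, not this module's data structures) condenses its decision order of consensus, then 'keep', then agreeing 'get', then an arbitrary fallback:

    def pick_bid(bids):
        # bids maps an action method ('g', 'k', 'm', ...) to a list of
        # (method, args, msg) tuples proposed by different ancestors.
        if len(bids) == 1:
            m, l = next(iter(bids.items()))
            if all(a == l[0] for a in l[1:]):
                return l[0]                      # consensus
        if 'k' in bids:
            return bids['k'][0]                  # keeping local is always safe
        if 'g' in bids and all(a == bids['g'][0] for a in bids['g'][1:]):
            return bids['g'][0]                  # all ancestors agree on 'get'
        return next(iter(bids.values()))[0]      # ambiguous: arbitrary pick

    # pick_bid({'g': [('g', ('x', False), 'remote is newer')]})
    #   -> ('g', ('x', False), 'remote is newer')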
1270 def _getcwd():
1272 def _getcwd():
1271 try:
1273 try:
1272 return pycompat.getcwd()
1274 return pycompat.getcwd()
1273 except OSError as err:
1275 except OSError as err:
1274 if err.errno == errno.ENOENT:
1276 if err.errno == errno.ENOENT:
1275 return None
1277 return None
1276 raise
1278 raise
1277
1279
1278 def batchremove(repo, wctx, actions):
1280 def batchremove(repo, wctx, actions):
1279 """apply removes to the working directory
1281 """apply removes to the working directory
1280
1282
1281 yields tuples for progress updates
1283 yields tuples for progress updates
1282 """
1284 """
1283 verbose = repo.ui.verbose
1285 verbose = repo.ui.verbose
1284 cwd = _getcwd()
1286 cwd = _getcwd()
1285 i = 0
1287 i = 0
1286 for f, args, msg in actions:
1288 for f, args, msg in actions:
1287 repo.ui.debug(" %s: %s -> r\n" % (f, msg))
1289 repo.ui.debug(" %s: %s -> r\n" % (f, msg))
1288 if verbose:
1290 if verbose:
1289 repo.ui.note(_("removing %s\n") % f)
1291 repo.ui.note(_("removing %s\n") % f)
1290 wctx[f].audit()
1292 wctx[f].audit()
1291 try:
1293 try:
1292 wctx[f].remove(ignoremissing=True)
1294 wctx[f].remove(ignoremissing=True)
1293 except OSError as inst:
1295 except OSError as inst:
1294 repo.ui.warn(_("update failed to remove %s: %s!\n") %
1296 repo.ui.warn(_("update failed to remove %s: %s!\n") %
1295 (f, inst.strerror))
1297 (f, inst.strerror))
1296 if i == 100:
1298 if i == 100:
1297 yield i, f
1299 yield i, f
1298 i = 0
1300 i = 0
1299 i += 1
1301 i += 1
1300 if i > 0:
1302 if i > 0:
1301 yield i, f
1303 yield i, f
1302
1304
1303 if cwd and not _getcwd():
1305 if cwd and not _getcwd():
1304 # cwd was removed in the course of removing files; print a helpful
1306 # cwd was removed in the course of removing files; print a helpful
1305 # warning.
1307 # warning.
1306 repo.ui.warn(_("current directory was removed\n"
1308 repo.ui.warn(_("current directory was removed\n"
1307 "(consider changing to repo root: %s)\n") % repo.root)
1309 "(consider changing to repo root: %s)\n") % repo.root)
1308
1310
1309 # It's necessary to flush here in case we're inside a worker fork and will
1311 # It's necessary to flush here in case we're inside a worker fork and will
1310 # quit after this function.
1312 # quit after this function.
1311 wctx.flushall()
1313 wctx.flushall()
1312
1314
1313 def batchget(repo, mctx, wctx, actions):
1315 def batchget(repo, mctx, wctx, actions):
1314 """apply gets to the working directory
1316 """apply gets to the working directory
1315
1317
1316 mctx is the context to get from
1318 mctx is the context to get from
1317
1319
1318 yields tuples for progress updates
1320 yields tuples for progress updates
1319 """
1321 """
1320 verbose = repo.ui.verbose
1322 verbose = repo.ui.verbose
1321 fctx = mctx.filectx
1323 fctx = mctx.filectx
1322 ui = repo.ui
1324 ui = repo.ui
1323 i = 0
1325 i = 0
1324 with repo.wvfs.backgroundclosing(ui, expectedcount=len(actions)):
1326 with repo.wvfs.backgroundclosing(ui, expectedcount=len(actions)):
1325 for f, (flags, backup), msg in actions:
1327 for f, (flags, backup), msg in actions:
1326 repo.ui.debug(" %s: %s -> g\n" % (f, msg))
1328 repo.ui.debug(" %s: %s -> g\n" % (f, msg))
1327 if verbose:
1329 if verbose:
1328 repo.ui.note(_("getting %s\n") % f)
1330 repo.ui.note(_("getting %s\n") % f)
1329
1331
1330 if backup:
1332 if backup:
1331 # If a file or directory exists with the same name, back that
1333 # If a file or directory exists with the same name, back that
1332 # up. Otherwise, look to see if there is a file that conflicts
1334 # up. Otherwise, look to see if there is a file that conflicts
1333 # with a directory this file is in, and if so, back that up.
1335 # with a directory this file is in, and if so, back that up.
1334 absf = repo.wjoin(f)
1336 absf = repo.wjoin(f)
1335 if not repo.wvfs.lexists(f):
1337 if not repo.wvfs.lexists(f):
1336 for p in util.finddirs(f):
1338 for p in util.finddirs(f):
1337 if repo.wvfs.isfileorlink(p):
1339 if repo.wvfs.isfileorlink(p):
1338 absf = repo.wjoin(p)
1340 absf = repo.wjoin(p)
1339 break
1341 break
1340 orig = scmutil.origpath(ui, repo, absf)
1342 orig = scmutil.origpath(ui, repo, absf)
1341 if repo.wvfs.lexists(absf):
1343 if repo.wvfs.lexists(absf):
1342 util.rename(absf, orig)
1344 util.rename(absf, orig)
1343 wctx[f].clearunknown()
1345 wctx[f].clearunknown()
1344 wctx[f].write(fctx(f).data(), flags, backgroundclose=True)
1346 wctx[f].write(fctx(f).data(), flags, backgroundclose=True)
1345 if i == 100:
1347 if i == 100:
1346 yield i, f
1348 yield i, f
1347 i = 0
1349 i = 0
1348 i += 1
1350 i += 1
1349 if i > 0:
1351 if i > 0:
1350 yield i, f
1352 yield i, f
1351
1353
1352 # It's necessary to flush here in case we're inside a worker fork and will
1354 # It's necessary to flush here in case we're inside a worker fork and will
1353 # quit after this function.
1355 # quit after this function.
1354 wctx.flushall()
1356 wctx.flushall()
1355
1357
1356 def applyupdates(repo, actions, wctx, mctx, overwrite, labels=None):
1358 def applyupdates(repo, actions, wctx, mctx, overwrite, labels=None):
1357 """apply the merge action list to the working directory
1359 """apply the merge action list to the working directory
1358
1360
1359 wctx is the working copy context
1361 wctx is the working copy context
1360 mctx is the context to be merged into the working copy
1362 mctx is the context to be merged into the working copy
1361
1363
1362 Return a tuple of counts (updated, merged, removed, unresolved) that
1364 Return a tuple of counts (updated, merged, removed, unresolved) that
1363 describes how many files were affected by the update.
1365 describes how many files were affected by the update.
1364 """
1366 """
1365
1367
1366 updated, merged, removed = 0, 0, 0
1368 updated, merged, removed = 0, 0, 0
1367 ms = mergestate.clean(repo, wctx.p1().node(), mctx.node(), labels)
1369 ms = mergestate.clean(repo, wctx.p1().node(), mctx.node(), labels)
1368 moves = []
1370 moves = []
1369 for m, l in actions.items():
1371 for m, l in actions.items():
1370 l.sort()
1372 l.sort()
1371
1373
1372 # 'cd' and 'dc' actions are treated like other merge conflicts
1374 # 'cd' and 'dc' actions are treated like other merge conflicts
1373 mergeactions = sorted(actions['cd'])
1375 mergeactions = sorted(actions['cd'])
1374 mergeactions.extend(sorted(actions['dc']))
1376 mergeactions.extend(sorted(actions['dc']))
1375 mergeactions.extend(actions['m'])
1377 mergeactions.extend(actions['m'])
1376 for f, args, msg in mergeactions:
1378 for f, args, msg in mergeactions:
1377 f1, f2, fa, move, anc = args
1379 f1, f2, fa, move, anc = args
1378 if f == '.hgsubstate': # merged internally
1380 if f == '.hgsubstate': # merged internally
1379 continue
1381 continue
1380 if f1 is None:
1382 if f1 is None:
1381 fcl = filemerge.absentfilectx(wctx, fa)
1383 fcl = filemerge.absentfilectx(wctx, fa)
1382 else:
1384 else:
1383 repo.ui.debug(" preserving %s for resolve of %s\n" % (f1, f))
1385 repo.ui.debug(" preserving %s for resolve of %s\n" % (f1, f))
1384 fcl = wctx[f1]
1386 fcl = wctx[f1]
1385 if f2 is None:
1387 if f2 is None:
1386 fco = filemerge.absentfilectx(mctx, fa)
1388 fco = filemerge.absentfilectx(mctx, fa)
1387 else:
1389 else:
1388 fco = mctx[f2]
1390 fco = mctx[f2]
1389 actx = repo[anc]
1391 actx = repo[anc]
1390 if fa in actx:
1392 if fa in actx:
1391 fca = actx[fa]
1393 fca = actx[fa]
1392 else:
1394 else:
1393 # TODO: move to absentfilectx
1395 # TODO: move to absentfilectx
1394 fca = repo.filectx(f1, fileid=nullrev)
1396 fca = repo.filectx(f1, fileid=nullrev)
1395 ms.add(fcl, fco, fca, f)
1397 ms.add(fcl, fco, fca, f)
1396 if f1 != f and move:
1398 if f1 != f and move:
1397 moves.append(f1)
1399 moves.append(f1)
1398
1400
1399 _updating = _('updating')
1401 _updating = _('updating')
1400 _files = _('files')
1402 _files = _('files')
1401 progress = repo.ui.progress
1403 progress = repo.ui.progress
1402
1404
1403 # remove renamed files after safely stored
1405 # remove renamed files after safely stored
1404 for f in moves:
1406 for f in moves:
1405 if wctx[f].lexists():
1407 if wctx[f].lexists():
1406 repo.ui.debug("removing %s\n" % f)
1408 repo.ui.debug("removing %s\n" % f)
1407 wctx[f].audit()
1409 wctx[f].audit()
1408 wctx[f].remove()
1410 wctx[f].remove()
1409
1411
1410 numupdates = sum(len(l) for m, l in actions.items() if m != 'k')
1412 numupdates = sum(len(l) for m, l in actions.items() if m != 'k')
1411 z = 0
1413 z = 0
1412
1414
1413 if [a for a in actions['r'] if a[0] == '.hgsubstate']:
1415 if [a for a in actions['r'] if a[0] == '.hgsubstate']:
1414 subrepo.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1416 subrepo.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1415
1417
1416 # record path conflicts
1418 # record path conflicts
1417 for f, args, msg in actions['p']:
1419 for f, args, msg in actions['p']:
1418 f1, fo = args
1420 f1, fo = args
1419 s = repo.ui.status
1421 s = repo.ui.status
1420 s(_("%s: path conflict - a file or link has the same name as a "
1422 s(_("%s: path conflict - a file or link has the same name as a "
1421 "directory\n") % f)
1423 "directory\n") % f)
1422 if fo == 'l':
1424 if fo == 'l':
1423 s(_("the local file has been renamed to %s\n") % f1)
1425 s(_("the local file has been renamed to %s\n") % f1)
1424 else:
1426 else:
1425 s(_("the remote file has been renamed to %s\n") % f1)
1427 s(_("the remote file has been renamed to %s\n") % f1)
1426 s(_("resolve manually then use 'hg resolve --mark %s'\n") % f)
1428 s(_("resolve manually then use 'hg resolve --mark %s'\n") % f)
1427 ms.addpath(f, f1, fo)
1429 ms.addpath(f, f1, fo)
1428 z += 1
1430 z += 1
1429 progress(_updating, z, item=f, total=numupdates, unit=_files)
1431 progress(_updating, z, item=f, total=numupdates, unit=_files)
1430
1432
1431 # When merging in-memory, we can't support worker processes, so set the
1433 # When merging in-memory, we can't support worker processes, so set the
1432 # per-item cost at 0 in that case.
1434 # per-item cost at 0 in that case.
1433 cost = 0 if wctx.isinmemory() else 0.001
1435 cost = 0 if wctx.isinmemory() else 0.001
1434
1436
1435 # remove in parallel (must come before resolving path conflicts and getting)
1437 # remove in parallel (must come before resolving path conflicts and getting)
1436 prog = worker.worker(repo.ui, cost, batchremove, (repo, wctx),
1438 prog = worker.worker(repo.ui, cost, batchremove, (repo, wctx),
1437 actions['r'])
1439 actions['r'])
1438 for i, item in prog:
1440 for i, item in prog:
1439 z += i
1441 z += i
1440 progress(_updating, z, item=item, total=numupdates, unit=_files)
1442 progress(_updating, z, item=item, total=numupdates, unit=_files)
1441 removed = len(actions['r'])
1443 removed = len(actions['r'])
1442
1444
1443 # resolve path conflicts (must come before getting)
1445 # resolve path conflicts (must come before getting)
1444 for f, args, msg in actions['pr']:
1446 for f, args, msg in actions['pr']:
1445 repo.ui.debug(" %s: %s -> pr\n" % (f, msg))
1447 repo.ui.debug(" %s: %s -> pr\n" % (f, msg))
1446 f0, = args
1448 f0, = args
1447 if wctx[f0].lexists():
1449 if wctx[f0].lexists():
1448 repo.ui.note(_("moving %s to %s\n") % (f0, f))
1450 repo.ui.note(_("moving %s to %s\n") % (f0, f))
1449 wctx[f].audit()
1451 wctx[f].audit()
1450 wctx[f].write(wctx.filectx(f0).data(), wctx.filectx(f0).flags())
1452 wctx[f].write(wctx.filectx(f0).data(), wctx.filectx(f0).flags())
1451 wctx[f0].remove()
1453 wctx[f0].remove()
1452 z += 1
1454 z += 1
1453 progress(_updating, z, item=f, total=numupdates, unit=_files)
1455 progress(_updating, z, item=f, total=numupdates, unit=_files)
1454
1456
1455 # We should flush before forking into worker processes, since those workers
1457 # We should flush before forking into worker processes, since those workers
1456 # flush when they complete, and we don't want to duplicate work.
1458 # flush when they complete, and we don't want to duplicate work.
1457 wctx.flushall()
1459 wctx.flushall()
1458
1460
1459 # get in parallel
1461 # get in parallel
1460 prog = worker.worker(repo.ui, cost, batchget, (repo, mctx, wctx),
1462 prog = worker.worker(repo.ui, cost, batchget, (repo, mctx, wctx),
1461 actions['g'])
1463 actions['g'])
1462 for i, item in prog:
1464 for i, item in prog:
1463 z += i
1465 z += i
1464 progress(_updating, z, item=item, total=numupdates, unit=_files)
1466 progress(_updating, z, item=item, total=numupdates, unit=_files)
1465 updated = len(actions['g'])
1467 updated = len(actions['g'])
1466
1468
1467 if [a for a in actions['g'] if a[0] == '.hgsubstate']:
1469 if [a for a in actions['g'] if a[0] == '.hgsubstate']:
1468 subrepo.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1470 subrepo.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1469
1471
1470 # forget (manifest only, just log it) (must come first)
1472 # forget (manifest only, just log it) (must come first)
1471 for f, args, msg in actions['f']:
1473 for f, args, msg in actions['f']:
1472 repo.ui.debug(" %s: %s -> f\n" % (f, msg))
1474 repo.ui.debug(" %s: %s -> f\n" % (f, msg))
1473 z += 1
1475 z += 1
1474 progress(_updating, z, item=f, total=numupdates, unit=_files)
1476 progress(_updating, z, item=f, total=numupdates, unit=_files)
1475
1477
1476 # re-add (manifest only, just log it)
1478 # re-add (manifest only, just log it)
1477 for f, args, msg in actions['a']:
1479 for f, args, msg in actions['a']:
1478 repo.ui.debug(" %s: %s -> a\n" % (f, msg))
1480 repo.ui.debug(" %s: %s -> a\n" % (f, msg))
1479 z += 1
1481 z += 1
1480 progress(_updating, z, item=f, total=numupdates, unit=_files)
1482 progress(_updating, z, item=f, total=numupdates, unit=_files)
1481
1483
1482 # re-add/mark as modified (manifest only, just log it)
1484 # re-add/mark as modified (manifest only, just log it)
1483 for f, args, msg in actions['am']:
1485 for f, args, msg in actions['am']:
1484 repo.ui.debug(" %s: %s -> am\n" % (f, msg))
1486 repo.ui.debug(" %s: %s -> am\n" % (f, msg))
1485 z += 1
1487 z += 1
1486 progress(_updating, z, item=f, total=numupdates, unit=_files)
1488 progress(_updating, z, item=f, total=numupdates, unit=_files)
1487
1489
1488 # keep (noop, just log it)
1490 # keep (noop, just log it)
1489 for f, args, msg in actions['k']:
1491 for f, args, msg in actions['k']:
1490 repo.ui.debug(" %s: %s -> k\n" % (f, msg))
1492 repo.ui.debug(" %s: %s -> k\n" % (f, msg))
1491 # no progress
1493 # no progress
1492
1494
1493 # directory rename, move local
1495 # directory rename, move local
1494 for f, args, msg in actions['dm']:
1496 for f, args, msg in actions['dm']:
1495 repo.ui.debug(" %s: %s -> dm\n" % (f, msg))
1497 repo.ui.debug(" %s: %s -> dm\n" % (f, msg))
1496 z += 1
1498 z += 1
1497 progress(_updating, z, item=f, total=numupdates, unit=_files)
1499 progress(_updating, z, item=f, total=numupdates, unit=_files)
1498 f0, flags = args
1500 f0, flags = args
1499 repo.ui.note(_("moving %s to %s\n") % (f0, f))
1501 repo.ui.note(_("moving %s to %s\n") % (f0, f))
1500 wctx[f].audit()
1502 wctx[f].audit()
1501 wctx[f].write(wctx.filectx(f0).data(), flags)
1503 wctx[f].write(wctx.filectx(f0).data(), flags)
1502 wctx[f0].remove()
1504 wctx[f0].remove()
1503 updated += 1
1505 updated += 1
1504
1506
1505 # local directory rename, get
1507 # local directory rename, get
1506 for f, args, msg in actions['dg']:
1508 for f, args, msg in actions['dg']:
1507 repo.ui.debug(" %s: %s -> dg\n" % (f, msg))
1509 repo.ui.debug(" %s: %s -> dg\n" % (f, msg))
1508 z += 1
1510 z += 1
1509 progress(_updating, z, item=f, total=numupdates, unit=_files)
1511 progress(_updating, z, item=f, total=numupdates, unit=_files)
1510 f0, flags = args
1512 f0, flags = args
1511 repo.ui.note(_("getting %s to %s\n") % (f0, f))
1513 repo.ui.note(_("getting %s to %s\n") % (f0, f))
1512 wctx[f].write(mctx.filectx(f0).data(), flags)
1514 wctx[f].write(mctx.filectx(f0).data(), flags)
1513 updated += 1
1515 updated += 1
1514
1516
1515 # exec
1517 # exec
1516 for f, args, msg in actions['e']:
1518 for f, args, msg in actions['e']:
1517 repo.ui.debug(" %s: %s -> e\n" % (f, msg))
1519 repo.ui.debug(" %s: %s -> e\n" % (f, msg))
1518 z += 1
1520 z += 1
1519 progress(_updating, z, item=f, total=numupdates, unit=_files)
1521 progress(_updating, z, item=f, total=numupdates, unit=_files)
1520 flags, = args
1522 flags, = args
1521 wctx[f].audit()
1523 wctx[f].audit()
1522 wctx[f].setflags('l' in flags, 'x' in flags)
1524 wctx[f].setflags('l' in flags, 'x' in flags)
1523 updated += 1
1525 updated += 1
1524
1526
1525 # the ordering is important here -- ms.mergedriver will raise if the merge
1527 # the ordering is important here -- ms.mergedriver will raise if the merge
1526 # driver has changed, and we want to be able to bypass it when overwrite is
1528 # driver has changed, and we want to be able to bypass it when overwrite is
1527 # True
1529 # True
1528 usemergedriver = not overwrite and mergeactions and ms.mergedriver
1530 usemergedriver = not overwrite and mergeactions and ms.mergedriver
1529
1531
1530 if usemergedriver:
1532 if usemergedriver:
1531 ms.commit()
1533 ms.commit()
1532 proceed = driverpreprocess(repo, ms, wctx, labels=labels)
1534 proceed = driverpreprocess(repo, ms, wctx, labels=labels)
1533 # the driver might leave some files unresolved
1535 # the driver might leave some files unresolved
1534 unresolvedf = set(ms.unresolved())
1536 unresolvedf = set(ms.unresolved())
1535 if not proceed:
1537 if not proceed:
1536 # XXX setting unresolved to at least 1 is a hack to make sure we
1538 # XXX setting unresolved to at least 1 is a hack to make sure we
1537 # error out
1539 # error out
1538 return updated, merged, removed, max(len(unresolvedf), 1)
1540 return updated, merged, removed, max(len(unresolvedf), 1)
1539 newactions = []
1541 newactions = []
1540 for f, args, msg in mergeactions:
1542 for f, args, msg in mergeactions:
1541 if f in unresolvedf:
1543 if f in unresolvedf:
1542 newactions.append((f, args, msg))
1544 newactions.append((f, args, msg))
1543 mergeactions = newactions
1545 mergeactions = newactions
1544
1546
1545 try:
1547 try:
1546 # premerge
1548 # premerge
1547 tocomplete = []
1549 tocomplete = []
1548 for f, args, msg in mergeactions:
1550 for f, args, msg in mergeactions:
1549 repo.ui.debug(" %s: %s -> m (premerge)\n" % (f, msg))
1551 repo.ui.debug(" %s: %s -> m (premerge)\n" % (f, msg))
1550 z += 1
1552 z += 1
1551 progress(_updating, z, item=f, total=numupdates, unit=_files)
1553 progress(_updating, z, item=f, total=numupdates, unit=_files)
1552 if f == '.hgsubstate': # subrepo states need updating
1554 if f == '.hgsubstate': # subrepo states need updating
1553 subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
1555 subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
1554 overwrite, labels)
1556 overwrite, labels)
1555 continue
1557 continue
1556 wctx[f].audit()
1558 wctx[f].audit()
1557 complete, r = ms.preresolve(f, wctx)
1559 complete, r = ms.preresolve(f, wctx)
1558 if not complete:
1560 if not complete:
1559 numupdates += 1
1561 numupdates += 1
1560 tocomplete.append((f, args, msg))
1562 tocomplete.append((f, args, msg))
1561
1563
1562 # merge
1564 # merge
1563 for f, args, msg in tocomplete:
1565 for f, args, msg in tocomplete:
1564 repo.ui.debug(" %s: %s -> m (merge)\n" % (f, msg))
1566 repo.ui.debug(" %s: %s -> m (merge)\n" % (f, msg))
1565 z += 1
1567 z += 1
1566 progress(_updating, z, item=f, total=numupdates, unit=_files)
1568 progress(_updating, z, item=f, total=numupdates, unit=_files)
1567 ms.resolve(f, wctx)
1569 ms.resolve(f, wctx)
1568
1570
1569 finally:
1571 finally:
1570 ms.commit()
1572 ms.commit()
1571
1573
1572 unresolved = ms.unresolvedcount()
1574 unresolved = ms.unresolvedcount()
1573
1575
1574 if usemergedriver and not unresolved and ms.mdstate() != 's':
1576 if usemergedriver and not unresolved and ms.mdstate() != 's':
1575 if not driverconclude(repo, ms, wctx, labels=labels):
1577 if not driverconclude(repo, ms, wctx, labels=labels):
1576 # XXX setting unresolved to at least 1 is a hack to make sure we
1578 # XXX setting unresolved to at least 1 is a hack to make sure we
1577 # error out
1579 # error out
1578 unresolved = max(unresolved, 1)
1580 unresolved = max(unresolved, 1)
1579
1581
1580 ms.commit()
1582 ms.commit()
1581
1583
1582 msupdated, msmerged, msremoved = ms.counts()
1584 msupdated, msmerged, msremoved = ms.counts()
1583 updated += msupdated
1585 updated += msupdated
1584 merged += msmerged
1586 merged += msmerged
1585 removed += msremoved
1587 removed += msremoved
1586
1588
1587 extraactions = ms.actions()
1589 extraactions = ms.actions()
1588 if extraactions:
1590 if extraactions:
1589 mfiles = set(a[0] for a in actions['m'])
1591 mfiles = set(a[0] for a in actions['m'])
1590 for k, acts in extraactions.iteritems():
1592 for k, acts in extraactions.iteritems():
1591 actions[k].extend(acts)
1593 actions[k].extend(acts)
1592 # Remove these files from actions['m'] as well. This is important
1594 # Remove these files from actions['m'] as well. This is important
1593 # because in recordupdates, files in actions['m'] are processed
1595 # because in recordupdates, files in actions['m'] are processed
1594 # after files in other actions, and the merge driver might add
1596 # after files in other actions, and the merge driver might add
1595 # files to those actions via extraactions above. This can lead to a
1597 # files to those actions via extraactions above. This can lead to a
1596 # file being recorded twice, with poor results. This is especially
1598 # file being recorded twice, with poor results. This is especially
1597 # problematic for actions['r'] (currently only possible with the
1599 # problematic for actions['r'] (currently only possible with the
1598 # merge driver in the initial merge process; interrupted merges
1600 # merge driver in the initial merge process; interrupted merges
1599 # don't go through this flow).
1601 # don't go through this flow).
1600 #
1602 #
1601 # The real fix here is to have indexes by both file and action so
1603 # The real fix here is to have indexes by both file and action so
1602 # that when the action for a file is changed it is automatically
1604 # that when the action for a file is changed it is automatically
1603 # reflected in the other action lists. But that involves a more
1605 # reflected in the other action lists. But that involves a more
1604 # complex data structure, so this will do for now.
1606 # complex data structure, so this will do for now.
1605 #
1607 #
1606 # We don't need to do the same operation for 'dc' and 'cd' because
1608 # We don't need to do the same operation for 'dc' and 'cd' because
1607 # those lists aren't consulted again.
1609 # those lists aren't consulted again.
1608 mfiles.difference_update(a[0] for a in acts)
1610 mfiles.difference_update(a[0] for a in acts)
1609
1611
1610 actions['m'] = [a for a in actions['m'] if a[0] in mfiles]
1612 actions['m'] = [a for a in actions['m'] if a[0] in mfiles]
1611
1613
1612 progress(_updating, None, total=numupdates, unit=_files)
1614 progress(_updating, None, total=numupdates, unit=_files)
1613
1615
1614 return updated, merged, removed, unresolved
1616 return updated, merged, removed, unresolved
1615
1617
1616 def recordupdates(repo, actions, branchmerge):
1618 def recordupdates(repo, actions, branchmerge):
1617 "record merge actions to the dirstate"
1619 "record merge actions to the dirstate"
1618 # remove (must come first)
1620 # remove (must come first)
1619 for f, args, msg in actions.get('r', []):
1621 for f, args, msg in actions.get('r', []):
1620 if branchmerge:
1622 if branchmerge:
1621 repo.dirstate.remove(f)
1623 repo.dirstate.remove(f)
1622 else:
1624 else:
1623 repo.dirstate.drop(f)
1625 repo.dirstate.drop(f)
1624
1626
1625 # forget (must come first)
1627 # forget (must come first)
1626 for f, args, msg in actions.get('f', []):
1628 for f, args, msg in actions.get('f', []):
1627 repo.dirstate.drop(f)
1629 repo.dirstate.drop(f)
1628
1630
1629 # resolve path conflicts
1631 # resolve path conflicts
1630 for f, args, msg in actions.get('pr', []):
1632 for f, args, msg in actions.get('pr', []):
1631 f0, = args
1633 f0, = args
1632 origf0 = repo.dirstate.copied(f0) or f0
1634 origf0 = repo.dirstate.copied(f0) or f0
1633 repo.dirstate.add(f)
1635 repo.dirstate.add(f)
1634 repo.dirstate.copy(origf0, f)
1636 repo.dirstate.copy(origf0, f)
1635 if f0 == origf0:
1637 if f0 == origf0:
1636 repo.dirstate.remove(f0)
1638 repo.dirstate.remove(f0)
1637 else:
1639 else:
1638 repo.dirstate.drop(f0)
1640 repo.dirstate.drop(f0)
1639
1641
1640 # re-add
1642 # re-add
1641 for f, args, msg in actions.get('a', []):
1643 for f, args, msg in actions.get('a', []):
1642 repo.dirstate.add(f)
1644 repo.dirstate.add(f)
1643
1645
1644 # re-add/mark as modified
1646 # re-add/mark as modified
1645 for f, args, msg in actions.get('am', []):
1647 for f, args, msg in actions.get('am', []):
1646 if branchmerge:
1648 if branchmerge:
1647 repo.dirstate.normallookup(f)
1649 repo.dirstate.normallookup(f)
1648 else:
1650 else:
1649 repo.dirstate.add(f)
1651 repo.dirstate.add(f)
1650
1652
1651 # exec change
1653 # exec change
1652 for f, args, msg in actions.get('e', []):
1654 for f, args, msg in actions.get('e', []):
1653 repo.dirstate.normallookup(f)
1655 repo.dirstate.normallookup(f)
1654
1656
1655 # keep
1657 # keep
1656 for f, args, msg in actions.get('k', []):
1658 for f, args, msg in actions.get('k', []):
1657 pass
1659 pass
1658
1660
1659 # get
1661 # get
1660 for f, args, msg in actions.get('g', []):
1662 for f, args, msg in actions.get('g', []):
1661 if branchmerge:
1663 if branchmerge:
1662 repo.dirstate.otherparent(f)
1664 repo.dirstate.otherparent(f)
1663 else:
1665 else:
1664 repo.dirstate.normal(f)
1666 repo.dirstate.normal(f)
1665
1667
1666 # merge
1668 # merge
1667 for f, args, msg in actions.get('m', []):
1669 for f, args, msg in actions.get('m', []):
1668 f1, f2, fa, move, anc = args
1670 f1, f2, fa, move, anc = args
1669 if branchmerge:
1671 if branchmerge:
1670 # We've done a branch merge, mark this file as merged
1672 # We've done a branch merge, mark this file as merged
1671 # so that we properly record the merger later
1673 # so that we properly record the merger later
1672 repo.dirstate.merge(f)
1674 repo.dirstate.merge(f)
1673 if f1 != f2: # copy/rename
1675 if f1 != f2: # copy/rename
1674 if move:
1676 if move:
1675 repo.dirstate.remove(f1)
1677 repo.dirstate.remove(f1)
1676 if f1 != f:
1678 if f1 != f:
1677 repo.dirstate.copy(f1, f)
1679 repo.dirstate.copy(f1, f)
1678 else:
1680 else:
1679 repo.dirstate.copy(f2, f)
1681 repo.dirstate.copy(f2, f)
1680 else:
1682 else:
1681 # We've update-merged a locally modified file, so
1683 # We've update-merged a locally modified file, so
1682 # we set the dirstate to emulate a normal checkout
1684 # we set the dirstate to emulate a normal checkout
1683 # of that file some time in the past. Thus our
1685 # of that file some time in the past. Thus our
1684 # merge will appear as a normal local file
1686 # merge will appear as a normal local file
1685 # modification.
1687 # modification.
1686 if f2 == f: # file not locally copied/moved
1688 if f2 == f: # file not locally copied/moved
1687 repo.dirstate.normallookup(f)
1689 repo.dirstate.normallookup(f)
1688 if move:
1690 if move:
1689 repo.dirstate.drop(f1)
1691 repo.dirstate.drop(f1)
1690
1692
1691 # directory rename, move local
1693 # directory rename, move local
1692 for f, args, msg in actions.get('dm', []):
1694 for f, args, msg in actions.get('dm', []):
1693 f0, flag = args
1695 f0, flag = args
1694 if branchmerge:
1696 if branchmerge:
1695 repo.dirstate.add(f)
1697 repo.dirstate.add(f)
1696 repo.dirstate.remove(f0)
1698 repo.dirstate.remove(f0)
1697 repo.dirstate.copy(f0, f)
1699 repo.dirstate.copy(f0, f)
1698 else:
1700 else:
1699 repo.dirstate.normal(f)
1701 repo.dirstate.normal(f)
1700 repo.dirstate.drop(f0)
1702 repo.dirstate.drop(f0)
1701
1703
1702 # directory rename, get
1704 # directory rename, get
1703 for f, args, msg in actions.get('dg', []):
1705 for f, args, msg in actions.get('dg', []):
1704 f0, flag = args
1706 f0, flag = args
1705 if branchmerge:
1707 if branchmerge:
1706 repo.dirstate.add(f)
1708 repo.dirstate.add(f)
1707 repo.dirstate.copy(f0, f)
1709 repo.dirstate.copy(f0, f)
1708 else:
1710 else:
1709 repo.dirstate.normal(f)
1711 repo.dirstate.normal(f)
1710
1712
def update(repo, node, branchmerge, force, ancestor=None,
           mergeancestor=False, labels=None, matcher=None, mergeforce=False,
           updatecheck=None, wc=None):
    """
    Perform a merge between the working directory and the given node

    node = the node to update to
    branchmerge = whether to merge between branches
    force = whether to force branch merging or file overwriting
    matcher = a matcher to filter file lists (dirstate not updated)
    mergeancestor = whether it is merging with an ancestor. If true,
      we should accept the incoming changes for any prompts that occur.
      If false, merging with an ancestor (fast-forward) is only allowed
      between different named branches. This flag is used by the rebase
      extension as a temporary fix and should be avoided in general.
    labels = labels to use for base, local and other
    mergeforce = whether the merge was run with 'merge --force' (deprecated):
      if this is True, then 'force' should be True as well.

    The table below shows all the behaviors of the update command given the
    -c/--check, -C/--clean and -m/--merge options (or no options), whether the
    working directory is dirty, whether a revision is specified, and the
    relationship of the parent rev to the target rev (linear or not). Match
    from top first. The -n option doesn't exist on the command line, but
    represents the experimental.updatecheck=noconflict option.

    This logic is tested by test-update-branches.t.

    -c  -C  -n  -m  dirty  rev  linear  |  result
     y   y   *   *    *     *     *     |    (1)
     y   *   y   *    *     *     *     |    (1)
     y   *   *   y    *     *     *     |    (1)
     *   y   y   *    *     *     *     |    (1)
     *   y   *   y    *     *     *     |    (1)
     *   *   y   y    *     *     *     |    (1)
     *   *   *   *    *     n     n     |     x
     *   *   *   *    n     *     *     |    ok
     n   n   n   n    y     *     y     |  merge
     n   n   n   n    y     y     n     |    (2)
     n   n   n   y    y     *     *     |  merge
     n   n   y   n    y     *     *     |  merge if no conflict
     n   y   n   n    y     *     *     |  discard
     y   n   n   n    y     *     *     |    (3)

    x = can't happen
    * = don't-care
    1 = incompatible options (checked in commands.py)
    2 = abort: uncommitted changes (commit or update --clean to discard changes)
    3 = abort: uncommitted changes (checked in commands.py)

    The merge is performed inside ``wc``, a workingctx-like object. It defaults
    to repo[None] if None is passed.

    Return the same tuple as applyupdates().
    """
    # Avoid cycle.
    from . import sparse

    # This function used to find the default destination if node was None, but
    # that's now in destutil.py.
    assert node is not None
    if not branchmerge and not force:
        # TODO: remove the default once all callers that pass branchmerge=False
        # and force=False pass a value for updatecheck. We may want to allow
        # updatecheck='abort' to better support some of these callers.
        if updatecheck is None:
            updatecheck = 'linear'
        assert updatecheck in ('none', 'linear', 'noconflict')
    # If we're doing a partial update, we need to skip updating
    # the dirstate, so make a note of any partial-ness to the
    # update here.
    if matcher is None or matcher.always():
        partial = False
    else:
        partial = True
    with repo.wlock():
        if wc is None:
            wc = repo[None]
        pl = wc.parents()
        p1 = pl[0]
        pas = [None]
        if ancestor is not None:
            pas = [repo[ancestor]]

        overwrite = force and not branchmerge

        p2 = repo[node]
        if pas[0] is None:
            if repo.ui.configlist('merge', 'preferancestor') == ['*']:
                cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node())
                pas = [repo[anc] for anc in (sorted(cahs) or [nullid])]
            else:
                pas = [p1.ancestor(p2, warn=branchmerge)]

        fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)

        ### check phase
        if not overwrite:
            if len(pl) > 1:
                raise error.Abort(_("outstanding uncommitted merge"))
            ms = mergestate.read(repo)
            if list(ms.unresolved()):
                raise error.Abort(_("outstanding merge conflicts"))
        if branchmerge:
            if pas == [p2]:
                raise error.Abort(_("merging with a working directory ancestor"
                                    " has no effect"))
            elif pas == [p1]:
                if not mergeancestor and wc.branch() == p2.branch():
                    raise error.Abort(_("nothing to merge"),
                                      hint=_("use 'hg update' "
                                             "or check 'hg heads'"))
            if not force and (wc.files() or wc.deleted()):
                raise error.Abort(_("uncommitted changes"),
                                  hint=_("use 'hg status' to list changes"))
            for s in sorted(wc.substate):
                wc.sub(s).bailifchanged()

        elif not overwrite:
            if p1 == p2: # no-op update
                # call the hooks and exit early
                repo.hook('preupdate', throw=True, parent1=xp2, parent2='')
                repo.hook('update', parent1=xp2, parent2='', error=0)
                return 0, 0, 0, 0

            if (updatecheck == 'linear' and
                    pas not in ([p1], [p2])): # nonlinear
                dirty = wc.dirty(missing=True)
                if dirty:
                    # The branching here is a bit strange in order to keep the
                    # number of calls to obsutil.foreground to a minimum.
                    foreground = obsutil.foreground(repo, [p1.node()])
                    # note: the <node> variable contains a random identifier
                    if repo[node].node() in foreground:
                        pass # allow updating to successors
                    else:
                        msg = _("uncommitted changes")
                        hint = _("commit or update --clean to discard changes")
                        raise error.UpdateAbort(msg, hint=hint)
                else:
                    # Allow jumping branches if clean and specific rev given
                    pass

        if overwrite:
            pas = [wc]
        elif not branchmerge:
            pas = [p1]

        # deprecated config: merge.followcopies
        followcopies = repo.ui.configbool('merge', 'followcopies')
        if overwrite:
            followcopies = False
        elif not pas[0]:
            followcopies = False
        if not branchmerge and not wc.dirty(missing=True):
            followcopies = False

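        # Illustrative hgrc sketch (example value, not a recommendation): the
        # deprecated knob read above could be turned off with
        #   [merge]
        #   followcopies = False
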
        ### calculate phase
        actionbyfile, diverge, renamedelete = calculateupdates(
            repo, wc, p2, pas, branchmerge, force, mergeancestor,
            followcopies, matcher=matcher, mergeforce=mergeforce)

        if updatecheck == 'noconflict':
            for f, (m, args, msg) in actionbyfile.iteritems():
                if m not in ('g', 'k', 'e', 'r', 'pr'):
                    msg = _("conflicting changes")
                    hint = _("commit or update --clean to discard changes")
                    raise error.Abort(msg, hint=hint)

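        # Illustrative hgrc sketch (hypothetical user config): the 'noconflict'
        # behaviour enforced above corresponds to the option named in the
        # docstring, e.g.
        #   [experimental]
        #   updatecheck = noconflict
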
        # Prompt and create actions. Most of this is in the resolve phase
        # already, but we can't handle .hgsubstate in filemerge or
        # subrepo.submerge yet so we have to keep prompting for it.
        if '.hgsubstate' in actionbyfile:
            f = '.hgsubstate'
            m, args, msg = actionbyfile[f]
            prompts = filemerge.partextras(labels)
            prompts['f'] = f
            if m == 'cd':
                if repo.ui.promptchoice(
                    _("local%(l)s changed %(f)s which other%(o)s deleted\n"
                      "use (c)hanged version or (d)elete?"
                      "$$ &Changed $$ &Delete") % prompts, 0):
                    actionbyfile[f] = ('r', None, "prompt delete")
                elif f in p1:
                    actionbyfile[f] = ('am', None, "prompt keep")
                else:
                    actionbyfile[f] = ('a', None, "prompt keep")
            elif m == 'dc':
                f1, f2, fa, move, anc = args
                flags = p2[f2].flags()
                if repo.ui.promptchoice(
                    _("other%(o)s changed %(f)s which local%(l)s deleted\n"
                      "use (c)hanged version or leave (d)eleted?"
                      "$$ &Changed $$ &Deleted") % prompts, 0) == 0:
                    actionbyfile[f] = ('g', (flags, False), "prompt recreating")
                else:
                    del actionbyfile[f]

        # Convert to dictionary-of-lists format
        actions = dict((m, [])
                       for m in 'a am f g cd dc r dm dg m e k p pr'.split())
        for f, (m, args, msg) in actionbyfile.iteritems():
            if m not in actions:
                actions[m] = []
            actions[m].append((f, args, msg))

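        # Shape illustration (hypothetical data): the loop above turns
        #   actionbyfile == {'a.txt': ('g', (flags, False), 'remote created')}
        # into
        #   actions['g'] == [('a.txt', (flags, False), 'remote created')]
        # so the later phases can iterate per action type instead of per file.
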
        if not util.fscasesensitive(repo.path):
            # check collision between files only in p2 for clean update
            if (not branchmerge and
                (force or not wc.dirty(missing=True, branch=False))):
                _checkcollision(repo, p2.manifest(), None)
            else:
                _checkcollision(repo, wc.manifest(), actions)

        # divergent renames
        for f, fl in sorted(diverge.iteritems()):
            repo.ui.warn(_("note: possible conflict - %s was renamed "
                           "multiple times to:\n") % f)
            for nf in fl:
                repo.ui.warn(" %s\n" % nf)

        # rename and delete
        for f, fl in sorted(renamedelete.iteritems()):
            repo.ui.warn(_("note: possible conflict - %s was deleted "
                           "and renamed to:\n") % f)
            for nf in fl:
                repo.ui.warn(" %s\n" % nf)

        ### apply phase
        if not branchmerge: # just jump to the new rev
            fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
        if not partial:
            repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
            # note that we're in the middle of an update
            repo.vfs.write('updatestate', p2.hex())

        # Advertise fsmonitor when its presence could be useful.
        #
        # We only advertise when performing an update from an empty working
        # directory. This typically only occurs during initial clone.
        #
        # We give users a mechanism to disable the warning in case it is
        # annoying.
        #
        # We only warn on Linux and macOS because that's where fsmonitor is
        # considered stable.
        fsmonitorwarning = repo.ui.configbool('fsmonitor', 'warn_when_unused')
        fsmonitorthreshold = repo.ui.configint('fsmonitor',
                                               'warn_update_file_count')
        try:
            extensions.find('fsmonitor')
            fsmonitorenabled = repo.ui.config('fsmonitor', 'mode') != 'off'
            # We intentionally don't look at whether fsmonitor has disabled
            # itself because a) fsmonitor may have already printed a warning
            # b) we only care about the config state here.
        except KeyError:
            fsmonitorenabled = False

        if (fsmonitorwarning
                and not fsmonitorenabled
                and p1.node() == nullid
                and len(actions['g']) >= fsmonitorthreshold
                and pycompat.sysplatform.startswith(('linux', 'darwin'))):
            repo.ui.warn(
                _('(warning: large working directory being used without '
                  'fsmonitor enabled; enable fsmonitor to improve performance; '
                  'see "hg help -e fsmonitor")\n'))

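        # Illustrative hgrc sketch (example values, not defaults): the warning
        # above is driven by the two knobs read earlier, e.g.
        #   [fsmonitor]
        #   warn_when_unused = False
        #   warn_update_file_count = 50000
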
        stats = applyupdates(repo, actions, wc, p2, overwrite, labels=labels)
        wc.flushall()

        if not partial:
            with repo.dirstate.parentchange():
                repo.setparents(fp1, fp2)
                recordupdates(repo, actions, branchmerge)
                # update completed, clear state
                util.unlink(repo.vfs.join('updatestate'))

                if not branchmerge:
                    repo.dirstate.setbranch(p2.branch())

    # If we're updating to a location, clean up any stale temporary includes
    # (ex: this happens during hg rebase --abort).
    if not branchmerge:
        sparse.prunetemporaryincludes(repo)

    if not partial:
        repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
    return stats

def graft(repo, ctx, pctx, labels, keepparent=False):
    """Do a graft-like merge.

    This is a merge where the merge ancestor is chosen such that one
    or more changesets are grafted onto the current changeset. In
    addition to the merge, this fixes up the dirstate to include only
    a single parent (if keepparent is False) and tries to duplicate any
    renames/copies appropriately.

    ctx - changeset to rebase
    pctx - merge base, usually ctx.p1()
    labels - merge labels, e.g. ['local', 'graft']
    keepparent - keep second parent if any

    """
    # If we're grafting a descendant onto an ancestor, be sure to pass
    # mergeancestor=True to update. This does two things: 1) allows the merge if
    # the destination is the same as the parent of the ctx (so we can use graft
    # to copy commits), and 2) informs update that the incoming changes are
    # newer than the destination so it doesn't prompt about "remote changed foo
    # which local deleted".
    mergeancestor = repo.changelog.isancestor(repo['.'].node(), ctx.node())

    stats = update(repo, ctx.node(), True, True, pctx.node(),
                   mergeancestor=mergeancestor, labels=labels)

    pother = nullid
    parents = ctx.parents()
    if keepparent and len(parents) == 2 and pctx in parents:
        parents.remove(pctx)
        pother = parents[0].node()

    with repo.dirstate.parentchange():
        repo.setparents(repo['.'].node(), pother)
        repo.dirstate.write(repo.currenttransaction())
        # fix up dirstate for copies and renames
        copies.duplicatecopies(repo, repo[None], ctx.rev(), pctx.rev())
    return stats