merge: mark file gets as not thread safe (issue5933)...
Gregory Szorc, r38755:be498426 (branch: default)
@@ -1,1376 +1,1379 @@
# configitems.py - centralized declaration of configuration option
#
# Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import functools
import re

from . import (
    encoding,
    error,
)

def loadconfigtable(ui, extname, configtable):
    """update config items known to the ui with the extension ones"""
    for section, items in sorted(configtable.items()):
        knownitems = ui._knownconfig.setdefault(section, itemregister())
        knownkeys = set(knownitems)
        newkeys = set(items)
        for key in sorted(knownkeys & newkeys):
            msg = "extension '%s' overwrite config item '%s.%s'"
            msg %= (extname, section, key)
            ui.develwarn(msg, config='warn-config')

        knownitems.update(items)

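# Illustrative sketch (not part of this module): an extension typically builds
# the ``configtable`` handed to loadconfigtable() through mercurial.registrar,
# along these lines:
#
#   from mercurial import registrar
#   configtable = {}
#   configitem = registrar.configitem(configtable)
#   configitem('myext', 'some-option', default=False)
#
# 'myext' and 'some-option' are hypothetical names used only for this example;
# loadconfigtable() above then merges that table into ui._knownconfig.
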
class configitem(object):
    """represent a known config item

    :section: the official config section where to find this item,
    :name: the official name within the section,
    :default: default value for this item,
    :alias: optional list of tuples as alternatives,
    :generic: this is a generic definition, match name using regular expression.
    """

    def __init__(self, section, name, default=None, alias=(),
                 generic=False, priority=0):
        self.section = section
        self.name = name
        self.default = default
        self.alias = list(alias)
        self.generic = generic
        self.priority = priority
        self._re = None
        if generic:
            self._re = re.compile(self.name)

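# For instance (illustrative only), a wildcard item covering every name in the
# 'color' section is declared with a regular-expression name:
#
#   configitem('color', '.*', default=None, generic=True)
#
# which mirrors the coreconfigitem('color', '.*', ...) registration further
# down in this file.
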
class itemregister(dict):
    """A specialized dictionary that can handle wild-card selection"""

    def __init__(self):
        super(itemregister, self).__init__()
        self._generics = set()

    def update(self, other):
        super(itemregister, self).update(other)
        self._generics.update(other._generics)

    def __setitem__(self, key, item):
        super(itemregister, self).__setitem__(key, item)
        if item.generic:
            self._generics.add(item)

    def get(self, key):
        baseitem = super(itemregister, self).get(key)
        if baseitem is not None and not baseitem.generic:
            return baseitem

        # search for a matching generic item
        generics = sorted(self._generics, key=(lambda x: (x.priority, x.name)))
        for item in generics:
            # We use 'match' instead of 'search' to make the matching simpler
            # for people unfamiliar with regular expressions. Having the match
            # rooted at the start of the string produces less surprising
            # results for users writing a simple regex for a sub-attribute.
            #
            # For example, matching against "color\..*" produces an
            # unsurprising result, while searching could suddenly match
            # apparently unrelated configuration that happens to contain
            # "color." anywhere. This is a tradeoff where we favor requiring
            # ".*" on some patterns over prefixing most patterns with "^",
            # which seems more error-prone.
            if item._re.match(key):
                return item

        return None

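# Lookup sketch (illustrative): assuming a register that holds the non-generic
# 'mode' item and the generic '.*' item of the 'color' section (both are
# registered below), get('mode') returns the exact entry directly, while
# get('status.modified') finds no exact match, scans the generics sorted by
# (priority, name), and returns the '.*' item because its regex matches.
# Exact, non-generic items therefore always win over generic ones.
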
coreitems = {}

def _register(configtable, *args, **kwargs):
    item = configitem(*args, **kwargs)
    section = configtable.setdefault(item.section, itemregister())
    if item.name in section:
        msg = "duplicated config item registration for '%s.%s'"
        raise error.ProgrammingError(msg % (item.section, item.name))
    section[item.name] = item

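# Note (illustrative): registering the same item twice, for example calling
#
#   coreconfigitem('ui', 'quiet', default=False)
#
# a second time, raises
# error.ProgrammingError("duplicated config item registration for 'ui.quiet'").
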
# special value for case where the default is derived from other values
dynamicdefault = object()

# Registering actual config items

def getitemregister(configtable):
    f = functools.partial(_register, configtable)
    # export pseudo enum as configitem.*
    f.dynamicdefault = dynamicdefault
    return f

coreconfigitem = getitemregister(coreitems)

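# Illustrative note: because getitemregister() attaches ``dynamicdefault`` to
# the returned partial, callers can spell a derived default without importing
# this module directly, e.g. (hypothetical extension item):
#
#   configitem('myext', 'opt', default=configitem.dynamicdefault)
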
coreconfigitem('alias', '.*',
    default=dynamicdefault,
    generic=True,
)
coreconfigitem('annotate', 'nodates',
    default=False,
)
coreconfigitem('annotate', 'showfunc',
    default=False,
)
coreconfigitem('annotate', 'unified',
    default=None,
)
coreconfigitem('annotate', 'git',
    default=False,
)
coreconfigitem('annotate', 'ignorews',
    default=False,
)
coreconfigitem('annotate', 'ignorewsamount',
    default=False,
)
coreconfigitem('annotate', 'ignoreblanklines',
    default=False,
)
coreconfigitem('annotate', 'ignorewseol',
    default=False,
)
coreconfigitem('annotate', 'nobinary',
    default=False,
)
coreconfigitem('annotate', 'noprefix',
    default=False,
)
coreconfigitem('annotate', 'word-diff',
    default=False,
)
coreconfigitem('auth', 'cookiefile',
    default=None,
)
# bookmarks.pushing: internal hack for discovery
coreconfigitem('bookmarks', 'pushing',
    default=list,
)
# bundle.mainreporoot: internal hack for bundlerepo
coreconfigitem('bundle', 'mainreporoot',
    default='',
)
# bundle.reorder: experimental config
coreconfigitem('bundle', 'reorder',
    default='auto',
)
coreconfigitem('censor', 'policy',
    default='abort',
)
coreconfigitem('chgserver', 'idletimeout',
    default=3600,
)
coreconfigitem('chgserver', 'skiphash',
    default=False,
)
coreconfigitem('cmdserver', 'log',
    default=None,
)
coreconfigitem('color', '.*',
    default=None,
    generic=True,
)
coreconfigitem('color', 'mode',
    default='auto',
)
coreconfigitem('color', 'pagermode',
    default=dynamicdefault,
)
coreconfigitem('commands', 'grep.all-files',
    default=False,
)
coreconfigitem('commands', 'show.aliasprefix',
    default=list,
)
coreconfigitem('commands', 'status.relative',
    default=False,
)
coreconfigitem('commands', 'status.skipstates',
    default=[],
)
coreconfigitem('commands', 'status.terse',
    default='',
)
coreconfigitem('commands', 'status.verbose',
    default=False,
)
coreconfigitem('commands', 'update.check',
    default=None,
)
coreconfigitem('commands', 'update.requiredest',
    default=False,
)
coreconfigitem('committemplate', '.*',
    default=None,
    generic=True,
)
coreconfigitem('convert', 'bzr.saverev',
    default=True,
)
coreconfigitem('convert', 'cvsps.cache',
    default=True,
)
coreconfigitem('convert', 'cvsps.fuzz',
    default=60,
)
coreconfigitem('convert', 'cvsps.logencoding',
    default=None,
)
coreconfigitem('convert', 'cvsps.mergefrom',
    default=None,
)
coreconfigitem('convert', 'cvsps.mergeto',
    default=None,
)
coreconfigitem('convert', 'git.committeractions',
    default=lambda: ['messagedifferent'],
)
coreconfigitem('convert', 'git.extrakeys',
    default=list,
)
coreconfigitem('convert', 'git.findcopiesharder',
    default=False,
)
coreconfigitem('convert', 'git.remoteprefix',
    default='remote',
)
coreconfigitem('convert', 'git.renamelimit',
    default=400,
)
coreconfigitem('convert', 'git.saverev',
    default=True,
)
coreconfigitem('convert', 'git.similarity',
    default=50,
)
coreconfigitem('convert', 'git.skipsubmodules',
    default=False,
)
coreconfigitem('convert', 'hg.clonebranches',
    default=False,
)
coreconfigitem('convert', 'hg.ignoreerrors',
    default=False,
)
coreconfigitem('convert', 'hg.revs',
    default=None,
)
coreconfigitem('convert', 'hg.saverev',
    default=False,
)
coreconfigitem('convert', 'hg.sourcename',
    default=None,
)
coreconfigitem('convert', 'hg.startrev',
    default=None,
)
coreconfigitem('convert', 'hg.tagsbranch',
    default='default',
)
coreconfigitem('convert', 'hg.usebranchnames',
    default=True,
)
coreconfigitem('convert', 'ignoreancestorcheck',
    default=False,
)
coreconfigitem('convert', 'localtimezone',
    default=False,
)
coreconfigitem('convert', 'p4.encoding',
    default=dynamicdefault,
)
coreconfigitem('convert', 'p4.startrev',
    default=0,
)
coreconfigitem('convert', 'skiptags',
    default=False,
)
coreconfigitem('convert', 'svn.debugsvnlog',
    default=True,
)
coreconfigitem('convert', 'svn.trunk',
    default=None,
)
coreconfigitem('convert', 'svn.tags',
    default=None,
)
coreconfigitem('convert', 'svn.branches',
    default=None,
)
coreconfigitem('convert', 'svn.startrev',
    default=0,
)
coreconfigitem('debug', 'dirstate.delaywrite',
    default=0,
)
coreconfigitem('defaults', '.*',
    default=None,
    generic=True,
)
coreconfigitem('devel', 'all-warnings',
    default=False,
)
coreconfigitem('devel', 'bundle2.debug',
    default=False,
)
coreconfigitem('devel', 'cache-vfs',
    default=None,
)
coreconfigitem('devel', 'check-locks',
    default=False,
)
coreconfigitem('devel', 'check-relroot',
    default=False,
)
coreconfigitem('devel', 'default-date',
    default=None,
)
coreconfigitem('devel', 'deprec-warn',
    default=False,
)
coreconfigitem('devel', 'disableloaddefaultcerts',
    default=False,
)
coreconfigitem('devel', 'warn-empty-changegroup',
    default=False,
)
coreconfigitem('devel', 'legacy.exchange',
    default=list,
)
coreconfigitem('devel', 'servercafile',
    default='',
)
coreconfigitem('devel', 'serverexactprotocol',
    default='',
)
coreconfigitem('devel', 'serverrequirecert',
    default=False,
)
coreconfigitem('devel', 'strip-obsmarkers',
    default=True,
)
coreconfigitem('devel', 'warn-config',
    default=None,
)
coreconfigitem('devel', 'warn-config-default',
    default=None,
)
coreconfigitem('devel', 'user.obsmarker',
    default=None,
)
coreconfigitem('devel', 'warn-config-unknown',
    default=None,
)
coreconfigitem('devel', 'debug.extensions',
    default=False,
)
coreconfigitem('devel', 'debug.peer-request',
    default=False,
)
coreconfigitem('diff', 'nodates',
    default=False,
)
coreconfigitem('diff', 'showfunc',
    default=False,
)
coreconfigitem('diff', 'unified',
    default=None,
)
coreconfigitem('diff', 'git',
    default=False,
)
coreconfigitem('diff', 'ignorews',
    default=False,
)
coreconfigitem('diff', 'ignorewsamount',
    default=False,
)
coreconfigitem('diff', 'ignoreblanklines',
    default=False,
)
coreconfigitem('diff', 'ignorewseol',
    default=False,
)
coreconfigitem('diff', 'nobinary',
    default=False,
)
coreconfigitem('diff', 'noprefix',
    default=False,
)
coreconfigitem('diff', 'word-diff',
    default=False,
)
coreconfigitem('email', 'bcc',
    default=None,
)
coreconfigitem('email', 'cc',
    default=None,
)
coreconfigitem('email', 'charsets',
    default=list,
)
coreconfigitem('email', 'from',
    default=None,
)
coreconfigitem('email', 'method',
    default='smtp',
)
coreconfigitem('email', 'reply-to',
    default=None,
)
coreconfigitem('email', 'to',
    default=None,
)
coreconfigitem('experimental', 'archivemetatemplate',
    default=dynamicdefault,
)
coreconfigitem('experimental', 'bundle-phases',
    default=False,
)
coreconfigitem('experimental', 'bundle2-advertise',
    default=True,
)
coreconfigitem('experimental', 'bundle2-output-capture',
    default=False,
)
coreconfigitem('experimental', 'bundle2.pushback',
    default=False,
)
coreconfigitem('experimental', 'bundle2.stream',
    default=False,
)
coreconfigitem('experimental', 'bundle2lazylocking',
    default=False,
)
coreconfigitem('experimental', 'bundlecomplevel',
    default=None,
)
coreconfigitem('experimental', 'bundlecomplevel.bzip2',
    default=None,
)
coreconfigitem('experimental', 'bundlecomplevel.gzip',
    default=None,
)
coreconfigitem('experimental', 'bundlecomplevel.none',
    default=None,
)
coreconfigitem('experimental', 'bundlecomplevel.zstd',
    default=None,
)
coreconfigitem('experimental', 'changegroup3',
    default=False,
)
coreconfigitem('experimental', 'clientcompressionengines',
    default=list,
)
coreconfigitem('experimental', 'copytrace',
    default='on',
)
coreconfigitem('experimental', 'copytrace.movecandidateslimit',
    default=100,
)
coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
    default=100,
)
coreconfigitem('experimental', 'crecordtest',
    default=None,
)
coreconfigitem('experimental', 'directaccess',
    default=False,
)
coreconfigitem('experimental', 'directaccess.revnums',
    default=False,
)
coreconfigitem('experimental', 'editortmpinhg',
    default=False,
)
coreconfigitem('experimental', 'evolution',
    default=list,
)
coreconfigitem('experimental', 'evolution.allowdivergence',
    default=False,
    alias=[('experimental', 'allowdivergence')]
)
coreconfigitem('experimental', 'evolution.allowunstable',
    default=None,
)
coreconfigitem('experimental', 'evolution.createmarkers',
    default=None,
)
coreconfigitem('experimental', 'evolution.effect-flags',
    default=True,
    alias=[('experimental', 'effect-flags')]
)
coreconfigitem('experimental', 'evolution.exchange',
    default=None,
)
coreconfigitem('experimental', 'evolution.bundle-obsmarker',
    default=False,
)
coreconfigitem('experimental', 'evolution.report-instabilities',
    default=True,
)
coreconfigitem('experimental', 'evolution.track-operation',
    default=True,
)
coreconfigitem('experimental', 'maxdeltachainspan',
    default=-1,
)
coreconfigitem('experimental', 'mergetempdirprefix',
    default=None,
)
coreconfigitem('experimental', 'mmapindexthreshold',
    default=None,
)
coreconfigitem('experimental', 'nonnormalparanoidcheck',
    default=False,
)
coreconfigitem('experimental', 'exportableenviron',
    default=list,
)
coreconfigitem('experimental', 'extendedheader.index',
    default=None,
)
coreconfigitem('experimental', 'extendedheader.similarity',
    default=False,
)
coreconfigitem('experimental', 'format.compression',
    default='zlib',
)
coreconfigitem('experimental', 'graphshorten',
    default=False,
)
coreconfigitem('experimental', 'graphstyle.parent',
    default=dynamicdefault,
)
coreconfigitem('experimental', 'graphstyle.missing',
    default=dynamicdefault,
)
coreconfigitem('experimental', 'graphstyle.grandparent',
    default=dynamicdefault,
)
coreconfigitem('experimental', 'hook-track-tags',
    default=False,
)
coreconfigitem('experimental', 'httppeer.advertise-v2',
    default=False,
)
coreconfigitem('experimental', 'httppostargs',
    default=False,
)
coreconfigitem('experimental', 'mergedriver',
    default=None,
)
coreconfigitem('experimental', 'nointerrupt', default=False)
coreconfigitem('experimental', 'nointerrupt-interactiveonly', default=True)

coreconfigitem('experimental', 'obsmarkers-exchange-debug',
    default=False,
)
coreconfigitem('experimental', 'remotenames',
    default=False,
)
coreconfigitem('experimental', 'removeemptydirs',
    default=True,
)
coreconfigitem('experimental', 'revlogv2',
    default=None,
)
coreconfigitem('experimental', 'single-head-per-branch',
    default=False,
)
coreconfigitem('experimental', 'sshserver.support-v2',
    default=False,
)
coreconfigitem('experimental', 'spacemovesdown',
    default=False,
)
coreconfigitem('experimental', 'sparse-read',
    default=False,
)
coreconfigitem('experimental', 'sparse-read.density-threshold',
    default=0.50,
)
coreconfigitem('experimental', 'sparse-read.min-gap-size',
    default='65K',
)
coreconfigitem('experimental', 'treemanifest',
    default=False,
)
coreconfigitem('experimental', 'update.atomic-file',
    default=False,
)
coreconfigitem('experimental', 'sshpeer.advertise-v2',
    default=False,
)
coreconfigitem('experimental', 'web.apiserver',
    default=False,
)
coreconfigitem('experimental', 'web.api.http-v2',
    default=False,
)
coreconfigitem('experimental', 'web.api.debugreflect',
    default=False,
)
coreconfigitem('experimental', 'worker.wdir-get-thread-safe',
    default=False,
)
coreconfigitem('experimental', 'xdiff',
    default=False,
)
coreconfigitem('extensions', '.*',
    default=None,
    generic=True,
)
coreconfigitem('extdata', '.*',
    default=None,
    generic=True,
)
coreconfigitem('format', 'aggressivemergedeltas',
    default=True,
)
coreconfigitem('format', 'chunkcachesize',
    default=None,
)
coreconfigitem('format', 'dotencode',
    default=True,
)
coreconfigitem('format', 'generaldelta',
    default=False,
)
coreconfigitem('format', 'manifestcachesize',
    default=None,
)
coreconfigitem('format', 'maxchainlen',
    default=None,
)
coreconfigitem('format', 'obsstore-version',
    default=None,
)
coreconfigitem('format', 'sparse-revlog',
    default=False,
)
coreconfigitem('format', 'usefncache',
    default=True,
)
coreconfigitem('format', 'usegeneraldelta',
    default=True,
)
coreconfigitem('format', 'usestore',
    default=True,
)
coreconfigitem('fsmonitor', 'warn_when_unused',
    default=True,
)
coreconfigitem('fsmonitor', 'warn_update_file_count',
    default=50000,
)
coreconfigitem('hooks', '.*',
    default=dynamicdefault,
    generic=True,
)
coreconfigitem('hgweb-paths', '.*',
    default=list,
    generic=True,
)
coreconfigitem('hostfingerprints', '.*',
    default=list,
    generic=True,
)
coreconfigitem('hostsecurity', 'ciphers',
    default=None,
)
coreconfigitem('hostsecurity', 'disabletls10warning',
    default=False,
)
coreconfigitem('hostsecurity', 'minimumprotocol',
    default=dynamicdefault,
)
coreconfigitem('hostsecurity', '.*:minimumprotocol$',
    default=dynamicdefault,
    generic=True,
)
coreconfigitem('hostsecurity', '.*:ciphers$',
    default=dynamicdefault,
    generic=True,
)
coreconfigitem('hostsecurity', '.*:fingerprints$',
    default=list,
    generic=True,
)
coreconfigitem('hostsecurity', '.*:verifycertsfile$',
    default=None,
    generic=True,
)

coreconfigitem('http_proxy', 'always',
    default=False,
)
coreconfigitem('http_proxy', 'host',
    default=None,
)
coreconfigitem('http_proxy', 'no',
    default=list,
)
coreconfigitem('http_proxy', 'passwd',
    default=None,
)
coreconfigitem('http_proxy', 'user',
    default=None,
)
coreconfigitem('logtoprocess', 'commandexception',
    default=None,
)
coreconfigitem('logtoprocess', 'commandfinish',
    default=None,
)
coreconfigitem('logtoprocess', 'command',
    default=None,
)
coreconfigitem('logtoprocess', 'develwarn',
    default=None,
)
coreconfigitem('logtoprocess', 'uiblocked',
    default=None,
)
coreconfigitem('merge', 'checkunknown',
    default='abort',
)
coreconfigitem('merge', 'checkignored',
    default='abort',
)
coreconfigitem('experimental', 'merge.checkpathconflicts',
    default=False,
)
coreconfigitem('merge', 'followcopies',
    default=True,
)
coreconfigitem('merge', 'on-failure',
    default='continue',
)
coreconfigitem('merge', 'preferancestor',
    default=lambda: ['*'],
)
coreconfigitem('merge-tools', '.*',
    default=None,
    generic=True,
)
coreconfigitem('merge-tools', br'.*\.args$',
    default="$local $base $other",
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.binary$',
    default=False,
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.check$',
    default=list,
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.checkchanged$',
    default=False,
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.executable$',
    default=dynamicdefault,
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.fixeol$',
    default=False,
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.gui$',
    default=False,
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.mergemarkers$',
    default='basic',
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.mergemarkertemplate$',
    default=dynamicdefault,  # take from ui.mergemarkertemplate
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.priority$',
    default=0,
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.premerge$',
    default=dynamicdefault,
    generic=True,
    priority=-1,
)
coreconfigitem('merge-tools', br'.*\.symlink$',
    default=False,
    generic=True,
    priority=-1,
)
coreconfigitem('pager', 'attend-.*',
    default=dynamicdefault,
    generic=True,
)
coreconfigitem('pager', 'ignore',
    default=list,
)
coreconfigitem('pager', 'pager',
    default=dynamicdefault,
)
coreconfigitem('patch', 'eol',
    default='strict',
)
coreconfigitem('patch', 'fuzz',
    default=2,
)
coreconfigitem('paths', 'default',
    default=None,
)
coreconfigitem('paths', 'default-push',
    default=None,
)
coreconfigitem('paths', '.*',
    default=None,
    generic=True,
)
coreconfigitem('phases', 'checksubrepos',
    default='follow',
)
coreconfigitem('phases', 'new-commit',
    default='draft',
)
coreconfigitem('phases', 'publish',
    default=True,
)
coreconfigitem('profiling', 'enabled',
    default=False,
)
coreconfigitem('profiling', 'format',
    default='text',
)
coreconfigitem('profiling', 'freq',
    default=1000,
)
coreconfigitem('profiling', 'limit',
    default=30,
)
coreconfigitem('profiling', 'nested',
    default=0,
)
coreconfigitem('profiling', 'output',
    default=None,
)
coreconfigitem('profiling', 'showmax',
    default=0.999,
)
coreconfigitem('profiling', 'showmin',
    default=dynamicdefault,
)
coreconfigitem('profiling', 'sort',
    default='inlinetime',
)
coreconfigitem('profiling', 'statformat',
    default='hotpath',
)
coreconfigitem('profiling', 'time-track',
    default='cpu',
)
coreconfigitem('profiling', 'type',
    default='stat',
)
coreconfigitem('progress', 'assume-tty',
    default=False,
)
coreconfigitem('progress', 'changedelay',
    default=1,
)
coreconfigitem('progress', 'clear-complete',
    default=True,
)
coreconfigitem('progress', 'debug',
    default=False,
)
coreconfigitem('progress', 'delay',
    default=3,
)
coreconfigitem('progress', 'disable',
    default=False,
)
coreconfigitem('progress', 'estimateinterval',
    default=60.0,
)
coreconfigitem('progress', 'format',
    default=lambda: ['topic', 'bar', 'number', 'estimate'],
)
coreconfigitem('progress', 'refresh',
    default=0.1,
)
coreconfigitem('progress', 'width',
    default=dynamicdefault,
)
coreconfigitem('push', 'pushvars.server',
    default=False,
)
coreconfigitem('server', 'bookmarks-pushkey-compat',
    default=True,
)
coreconfigitem('server', 'bundle1',
    default=True,
)
coreconfigitem('server', 'bundle1gd',
    default=None,
)
coreconfigitem('server', 'bundle1.pull',
    default=None,
)
coreconfigitem('server', 'bundle1gd.pull',
    default=None,
)
coreconfigitem('server', 'bundle1.push',
    default=None,
)
coreconfigitem('server', 'bundle1gd.push',
    default=None,
)
coreconfigitem('server', 'compressionengines',
    default=list,
)
coreconfigitem('server', 'concurrent-push-mode',
    default='strict',
)
coreconfigitem('server', 'disablefullbundle',
    default=False,
)
coreconfigitem('server', 'maxhttpheaderlen',
    default=1024,
)
coreconfigitem('server', 'pullbundle',
    default=False,
)
coreconfigitem('server', 'preferuncompressed',
    default=False,
)
coreconfigitem('server', 'streamunbundle',
    default=False,
)
coreconfigitem('server', 'uncompressed',
    default=True,
)
coreconfigitem('server', 'uncompressedallowsecret',
    default=False,
)
coreconfigitem('server', 'validate',
    default=False,
)
coreconfigitem('server', 'zliblevel',
    default=-1,
)
coreconfigitem('server', 'zstdlevel',
    default=3,
)
coreconfigitem('share', 'pool',
    default=None,
)
coreconfigitem('share', 'poolnaming',
    default='identity',
)
coreconfigitem('smtp', 'host',
    default=None,
)
coreconfigitem('smtp', 'local_hostname',
    default=None,
)
coreconfigitem('smtp', 'password',
    default=None,
)
coreconfigitem('smtp', 'port',
    default=dynamicdefault,
)
coreconfigitem('smtp', 'tls',
    default='none',
)
coreconfigitem('smtp', 'username',
    default=None,
)
coreconfigitem('sparse', 'missingwarning',
    default=True,
)
coreconfigitem('subrepos', 'allowed',
    default=dynamicdefault,  # to make backporting simpler
)
coreconfigitem('subrepos', 'hg:allowed',
    default=dynamicdefault,
)
coreconfigitem('subrepos', 'git:allowed',
    default=dynamicdefault,
)
coreconfigitem('subrepos', 'svn:allowed',
    default=dynamicdefault,
)
coreconfigitem('templates', '.*',
    default=None,
    generic=True,
)
coreconfigitem('trusted', 'groups',
    default=list,
)
coreconfigitem('trusted', 'users',
    default=list,
)
coreconfigitem('ui', '_usedassubrepo',
    default=False,
)
coreconfigitem('ui', 'allowemptycommit',
    default=False,
)
coreconfigitem('ui', 'archivemeta',
    default=True,
)
coreconfigitem('ui', 'askusername',
    default=False,
)
coreconfigitem('ui', 'clonebundlefallback',
    default=False,
)
coreconfigitem('ui', 'clonebundleprefers',
    default=list,
)
coreconfigitem('ui', 'clonebundles',
    default=True,
)
coreconfigitem('ui', 'color',
    default='auto',
)
coreconfigitem('ui', 'commitsubrepos',
    default=False,
)
coreconfigitem('ui', 'debug',
    default=False,
)
coreconfigitem('ui', 'debugger',
    default=None,
)
coreconfigitem('ui', 'editor',
    default=dynamicdefault,
)
coreconfigitem('ui', 'fallbackencoding',
    default=None,
)
coreconfigitem('ui', 'forcecwd',
    default=None,
)
coreconfigitem('ui', 'forcemerge',
    default=None,
)
coreconfigitem('ui', 'formatdebug',
    default=False,
)
coreconfigitem('ui', 'formatjson',
    default=False,
)
coreconfigitem('ui', 'formatted',
    default=None,
)
coreconfigitem('ui', 'graphnodetemplate',
    default=None,
)
coreconfigitem('ui', 'interactive',
    default=None,
)
coreconfigitem('ui', 'interface',
    default=None,
)
coreconfigitem('ui', 'interface.chunkselector',
    default=None,
)
coreconfigitem('ui', 'large-file-limit',
    default=10000000,
)
coreconfigitem('ui', 'logblockedtimes',
    default=False,
)
coreconfigitem('ui', 'logtemplate',
    default=None,
)
coreconfigitem('ui', 'merge',
    default=None,
)
coreconfigitem('ui', 'mergemarkers',
    default='basic',
)
coreconfigitem('ui', 'mergemarkertemplate',
    default=('{node|short} '
             '{ifeq(tags, "tip", "", '
             'ifeq(tags, "", "", "{tags} "))}'
             '{if(bookmarks, "{bookmarks} ")}'
             '{ifeq(branch, "default", "", "{branch} ")}'
             '- {author|user}: {desc|firstline}')
)
coreconfigitem('ui', 'nontty',
    default=False,
)
coreconfigitem('ui', 'origbackuppath',
    default=None,
)
coreconfigitem('ui', 'paginate',
    default=True,
)
coreconfigitem('ui', 'patch',
    default=None,
)
coreconfigitem('ui', 'portablefilenames',
    default='warn',
)
coreconfigitem('ui', 'promptecho',
    default=False,
)
coreconfigitem('ui', 'quiet',
    default=False,
)
coreconfigitem('ui', 'quietbookmarkmove',
    default=False,
)
coreconfigitem('ui', 'remotecmd',
    default='hg',
)
coreconfigitem('ui', 'report_untrusted',
    default=True,
)
coreconfigitem('ui', 'rollback',
    default=True,
)
coreconfigitem('ui', 'signal-safe-lock',
    default=True,
)
coreconfigitem('ui', 'slash',
    default=False,
)
coreconfigitem('ui', 'ssh',
    default='ssh',
)
coreconfigitem('ui', 'ssherrorhint',
    default=None,
)
coreconfigitem('ui', 'statuscopies',
    default=False,
)
coreconfigitem('ui', 'strict',
    default=False,
)
coreconfigitem('ui', 'style',
    default='',
)
coreconfigitem('ui', 'supportcontact',
    default=None,
)
coreconfigitem('ui', 'textwidth',
    default=78,
)
coreconfigitem('ui', 'timeout',
    default='600',
)
coreconfigitem('ui', 'timeout.warn',
    default=0,
)
coreconfigitem('ui', 'traceback',
    default=False,
)
coreconfigitem('ui', 'tweakdefaults',
    default=False,
)
coreconfigitem('ui', 'username',
    alias=[('ui', 'user')]
)
coreconfigitem('ui', 'verbose',
    default=False,
1204 default=False,
1202 )
1205 )
1203 coreconfigitem('verify', 'skipflags',
1206 coreconfigitem('verify', 'skipflags',
1204 default=None,
1207 default=None,
1205 )
1208 )
1206 coreconfigitem('web', 'allowbz2',
1209 coreconfigitem('web', 'allowbz2',
1207 default=False,
1210 default=False,
1208 )
1211 )
1209 coreconfigitem('web', 'allowgz',
1212 coreconfigitem('web', 'allowgz',
1210 default=False,
1213 default=False,
1211 )
1214 )
1212 coreconfigitem('web', 'allow-pull',
1215 coreconfigitem('web', 'allow-pull',
1213 alias=[('web', 'allowpull')],
1216 alias=[('web', 'allowpull')],
1214 default=True,
1217 default=True,
1215 )
1218 )
1216 coreconfigitem('web', 'allow-push',
1219 coreconfigitem('web', 'allow-push',
1217 alias=[('web', 'allow_push')],
1220 alias=[('web', 'allow_push')],
1218 default=list,
1221 default=list,
1219 )
1222 )
1220 coreconfigitem('web', 'allowzip',
1223 coreconfigitem('web', 'allowzip',
1221 default=False,
1224 default=False,
1222 )
1225 )
1223 coreconfigitem('web', 'archivesubrepos',
1226 coreconfigitem('web', 'archivesubrepos',
1224 default=False,
1227 default=False,
1225 )
1228 )
1226 coreconfigitem('web', 'cache',
1229 coreconfigitem('web', 'cache',
1227 default=True,
1230 default=True,
1228 )
1231 )
1229 coreconfigitem('web', 'contact',
1232 coreconfigitem('web', 'contact',
1230 default=None,
1233 default=None,
1231 )
1234 )
1232 coreconfigitem('web', 'deny_push',
1235 coreconfigitem('web', 'deny_push',
1233 default=list,
1236 default=list,
1234 )
1237 )
1235 coreconfigitem('web', 'guessmime',
1238 coreconfigitem('web', 'guessmime',
1236 default=False,
1239 default=False,
1237 )
1240 )
1238 coreconfigitem('web', 'hidden',
1241 coreconfigitem('web', 'hidden',
1239 default=False,
1242 default=False,
1240 )
1243 )
1241 coreconfigitem('web', 'labels',
1244 coreconfigitem('web', 'labels',
1242 default=list,
1245 default=list,
1243 )
1246 )
1244 coreconfigitem('web', 'logoimg',
1247 coreconfigitem('web', 'logoimg',
1245 default='hglogo.png',
1248 default='hglogo.png',
1246 )
1249 )
1247 coreconfigitem('web', 'logourl',
1250 coreconfigitem('web', 'logourl',
1248 default='https://mercurial-scm.org/',
1251 default='https://mercurial-scm.org/',
1249 )
1252 )
1250 coreconfigitem('web', 'accesslog',
1253 coreconfigitem('web', 'accesslog',
1251 default='-',
1254 default='-',
1252 )
1255 )
1253 coreconfigitem('web', 'address',
1256 coreconfigitem('web', 'address',
1254 default='',
1257 default='',
1255 )
1258 )
1256 coreconfigitem('web', 'allow-archive',
1259 coreconfigitem('web', 'allow-archive',
1257 alias=[('web', 'allow_archive')],
1260 alias=[('web', 'allow_archive')],
1258 default=list,
1261 default=list,
1259 )
1262 )
1260 coreconfigitem('web', 'allow_read',
1263 coreconfigitem('web', 'allow_read',
1261 default=list,
1264 default=list,
1262 )
1265 )
1263 coreconfigitem('web', 'baseurl',
1266 coreconfigitem('web', 'baseurl',
1264 default=None,
1267 default=None,
1265 )
1268 )
1266 coreconfigitem('web', 'cacerts',
1269 coreconfigitem('web', 'cacerts',
1267 default=None,
1270 default=None,
1268 )
1271 )
1269 coreconfigitem('web', 'certificate',
1272 coreconfigitem('web', 'certificate',
1270 default=None,
1273 default=None,
1271 )
1274 )
1272 coreconfigitem('web', 'collapse',
1275 coreconfigitem('web', 'collapse',
1273 default=False,
1276 default=False,
1274 )
1277 )
1275 coreconfigitem('web', 'csp',
1278 coreconfigitem('web', 'csp',
1276 default=None,
1279 default=None,
1277 )
1280 )
1278 coreconfigitem('web', 'deny_read',
1281 coreconfigitem('web', 'deny_read',
1279 default=list,
1282 default=list,
1280 )
1283 )
1281 coreconfigitem('web', 'descend',
1284 coreconfigitem('web', 'descend',
1282 default=True,
1285 default=True,
1283 )
1286 )
1284 coreconfigitem('web', 'description',
1287 coreconfigitem('web', 'description',
1285 default="",
1288 default="",
1286 )
1289 )
1287 coreconfigitem('web', 'encoding',
1290 coreconfigitem('web', 'encoding',
1288 default=lambda: encoding.encoding,
1291 default=lambda: encoding.encoding,
1289 )
1292 )
1290 coreconfigitem('web', 'errorlog',
1293 coreconfigitem('web', 'errorlog',
1291 default='-',
1294 default='-',
1292 )
1295 )
1293 coreconfigitem('web', 'ipv6',
1296 coreconfigitem('web', 'ipv6',
1294 default=False,
1297 default=False,
1295 )
1298 )
1296 coreconfigitem('web', 'maxchanges',
1299 coreconfigitem('web', 'maxchanges',
1297 default=10,
1300 default=10,
1298 )
1301 )
1299 coreconfigitem('web', 'maxfiles',
1302 coreconfigitem('web', 'maxfiles',
1300 default=10,
1303 default=10,
1301 )
1304 )
1302 coreconfigitem('web', 'maxshortchanges',
1305 coreconfigitem('web', 'maxshortchanges',
1303 default=60,
1306 default=60,
1304 )
1307 )
1305 coreconfigitem('web', 'motd',
1308 coreconfigitem('web', 'motd',
1306 default='',
1309 default='',
1307 )
1310 )
1308 coreconfigitem('web', 'name',
1311 coreconfigitem('web', 'name',
1309 default=dynamicdefault,
1312 default=dynamicdefault,
1310 )
1313 )
1311 coreconfigitem('web', 'port',
1314 coreconfigitem('web', 'port',
1312 default=8000,
1315 default=8000,
1313 )
1316 )
1314 coreconfigitem('web', 'prefix',
1317 coreconfigitem('web', 'prefix',
1315 default='',
1318 default='',
1316 )
1319 )
1317 coreconfigitem('web', 'push_ssl',
1320 coreconfigitem('web', 'push_ssl',
1318 default=True,
1321 default=True,
1319 )
1322 )
1320 coreconfigitem('web', 'refreshinterval',
1323 coreconfigitem('web', 'refreshinterval',
1321 default=20,
1324 default=20,
1322 )
1325 )
1323 coreconfigitem('web', 'server-header',
1326 coreconfigitem('web', 'server-header',
1324 default=None,
1327 default=None,
1325 )
1328 )
1326 coreconfigitem('web', 'staticurl',
1329 coreconfigitem('web', 'staticurl',
1327 default=None,
1330 default=None,
1328 )
1331 )
1329 coreconfigitem('web', 'stripes',
1332 coreconfigitem('web', 'stripes',
1330 default=1,
1333 default=1,
1331 )
1334 )
1332 coreconfigitem('web', 'style',
1335 coreconfigitem('web', 'style',
1333 default='paper',
1336 default='paper',
1334 )
1337 )
1335 coreconfigitem('web', 'templates',
1338 coreconfigitem('web', 'templates',
1336 default=None,
1339 default=None,
1337 )
1340 )
1338 coreconfigitem('web', 'view',
1341 coreconfigitem('web', 'view',
1339 default='served',
1342 default='served',
1340 )
1343 )
1341 coreconfigitem('worker', 'backgroundclose',
1344 coreconfigitem('worker', 'backgroundclose',
1342 default=dynamicdefault,
1345 default=dynamicdefault,
1343 )
1346 )
1344 # Windows defaults to a limit of 512 open files. A buffer of 128
1347 # Windows defaults to a limit of 512 open files. A buffer of 128
1345 # should give us enough headway.
1348 # should give us enough headway.
1346 coreconfigitem('worker', 'backgroundclosemaxqueue',
1349 coreconfigitem('worker', 'backgroundclosemaxqueue',
1347 default=384,
1350 default=384,
1348 )
1351 )
1349 coreconfigitem('worker', 'backgroundcloseminfilecount',
1352 coreconfigitem('worker', 'backgroundcloseminfilecount',
1350 default=2048,
1353 default=2048,
1351 )
1354 )
1352 coreconfigitem('worker', 'backgroundclosethreadcount',
1355 coreconfigitem('worker', 'backgroundclosethreadcount',
1353 default=4,
1356 default=4,
1354 )
1357 )
1355 coreconfigitem('worker', 'enabled',
1358 coreconfigitem('worker', 'enabled',
1356 default=True,
1359 default=True,
1357 )
1360 )
1358 coreconfigitem('worker', 'numcpus',
1361 coreconfigitem('worker', 'numcpus',
1359 default=None,
1362 default=None,
1360 )
1363 )
1361
1364
1362 # Rebase-related configuration moved to core because other extensions are doing
1363 # strange things. For example, shelve imports the extension to reuse some bits
1364 # without formally loading it.
1365 coreconfigitem('commands', 'rebase.requiredest',
1368 coreconfigitem('commands', 'rebase.requiredest',
1366 default=False,
1369 default=False,
1367 )
1370 )
1368 coreconfigitem('experimental', 'rebaseskipobsolete',
1371 coreconfigitem('experimental', 'rebaseskipobsolete',
1369 default=True,
1372 default=True,
1370 )
1373 )
1371 coreconfigitem('rebase', 'singletransaction',
1374 coreconfigitem('rebase', 'singletransaction',
1372 default=False,
1375 default=False,
1373 )
1376 )
1374 coreconfigitem('rebase', 'experimental.inmemory',
1377 coreconfigitem('rebase', 'experimental.inmemory',
1375 default=False,
1378 default=False,
1376 )
1379 )
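As a brief, hedged illustration (not part of the change above): items registered with coreconfigitem() supply the default that the normal ui.config* accessors fall back to, so call sites do not have to repeat it. The repo variable below is assumed.

# Illustrative sketch only: reading options declared above via the ui API.
single_tx = repo.ui.configbool('rebase', 'singletransaction')   # False unless set in hgrc
limit = repo.ui.configint('ui', 'large-file-limit')             # 10000000 by default
editor = repo.ui.config('ui', 'editor')                         # dynamicdefault, resolved at runtime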
@@ -1,2240 +1,2243
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12 import shutil
12 import shutil
13 import struct
13 import struct
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 addednodeid,
17 addednodeid,
18 bin,
18 bin,
19 hex,
19 hex,
20 modifiednodeid,
20 modifiednodeid,
21 nullhex,
21 nullhex,
22 nullid,
22 nullid,
23 nullrev,
23 nullrev,
24 )
24 )
25 from .thirdparty import (
25 from .thirdparty import (
26 attr,
26 attr,
27 )
27 )
28 from . import (
28 from . import (
29 copies,
29 copies,
30 error,
30 error,
31 filemerge,
31 filemerge,
32 match as matchmod,
32 match as matchmod,
33 obsutil,
33 obsutil,
34 pycompat,
34 pycompat,
35 scmutil,
35 scmutil,
36 subrepoutil,
36 subrepoutil,
37 util,
37 util,
38 worker,
38 worker,
39 )
39 )
40
40
41 _pack = struct.pack
41 _pack = struct.pack
42 _unpack = struct.unpack
42 _unpack = struct.unpack
43
43
44 def _droponode(data):
44 def _droponode(data):
45 # used for compatibility with v1
46 bits = data.split('\0')
46 bits = data.split('\0')
47 bits = bits[:-2] + bits[-1:]
47 bits = bits[:-2] + bits[-1:]
48 return '\0'.join(bits)
48 return '\0'.join(bits)
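A minimal sketch of the transformation _droponode performs (the field values are made up, shown only to make the v1 compatibility behaviour concrete): the second-to-last NUL-separated field, the "other file node" that v1 cannot store, is removed.

# Sketch with hypothetical fields f1..f4: the second-to-last field is dropped.
assert _droponode('f1\0f2\0f3\0f4') == 'f1\0f2\0f4'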
49
49
50 # Merge state record types. See ``mergestate`` docs for more.
50 # Merge state record types. See ``mergestate`` docs for more.
51 RECORD_LOCAL = b'L'
51 RECORD_LOCAL = b'L'
52 RECORD_OTHER = b'O'
52 RECORD_OTHER = b'O'
53 RECORD_MERGED = b'F'
53 RECORD_MERGED = b'F'
54 RECORD_CHANGEDELETE_CONFLICT = b'C'
54 RECORD_CHANGEDELETE_CONFLICT = b'C'
55 RECORD_MERGE_DRIVER_MERGE = b'D'
55 RECORD_MERGE_DRIVER_MERGE = b'D'
56 RECORD_PATH_CONFLICT = b'P'
56 RECORD_PATH_CONFLICT = b'P'
57 RECORD_MERGE_DRIVER_STATE = b'm'
57 RECORD_MERGE_DRIVER_STATE = b'm'
58 RECORD_FILE_VALUES = b'f'
58 RECORD_FILE_VALUES = b'f'
59 RECORD_LABELS = b'l'
59 RECORD_LABELS = b'l'
60 RECORD_OVERRIDE = b't'
60 RECORD_OVERRIDE = b't'
61 RECORD_UNSUPPORTED_MANDATORY = b'X'
61 RECORD_UNSUPPORTED_MANDATORY = b'X'
62 RECORD_UNSUPPORTED_ADVISORY = b'x'
62 RECORD_UNSUPPORTED_ADVISORY = b'x'
63
63
64 MERGE_DRIVER_STATE_UNMARKED = b'u'
64 MERGE_DRIVER_STATE_UNMARKED = b'u'
65 MERGE_DRIVER_STATE_MARKED = b'm'
65 MERGE_DRIVER_STATE_MARKED = b'm'
66 MERGE_DRIVER_STATE_SUCCESS = b's'
66 MERGE_DRIVER_STATE_SUCCESS = b's'
67
67
68 MERGE_RECORD_UNRESOLVED = b'u'
68 MERGE_RECORD_UNRESOLVED = b'u'
69 MERGE_RECORD_RESOLVED = b'r'
69 MERGE_RECORD_RESOLVED = b'r'
70 MERGE_RECORD_UNRESOLVED_PATH = b'pu'
70 MERGE_RECORD_UNRESOLVED_PATH = b'pu'
71 MERGE_RECORD_RESOLVED_PATH = b'pr'
71 MERGE_RECORD_RESOLVED_PATH = b'pr'
72 MERGE_RECORD_DRIVER_RESOLVED = b'd'
72 MERGE_RECORD_DRIVER_RESOLVED = b'd'
73
73
74 ACTION_FORGET = b'f'
74 ACTION_FORGET = b'f'
75 ACTION_REMOVE = b'r'
75 ACTION_REMOVE = b'r'
76 ACTION_ADD = b'a'
76 ACTION_ADD = b'a'
77 ACTION_GET = b'g'
77 ACTION_GET = b'g'
78 ACTION_PATH_CONFLICT = b'p'
78 ACTION_PATH_CONFLICT = b'p'
79 ACTION_PATH_CONFLICT_RESOLVE = b'pr'
79 ACTION_PATH_CONFLICT_RESOLVE = b'pr'
80 ACTION_ADD_MODIFIED = b'am'
80 ACTION_ADD_MODIFIED = b'am'
81 ACTION_CREATED = b'c'
81 ACTION_CREATED = b'c'
82 ACTION_DELETED_CHANGED = b'dc'
82 ACTION_DELETED_CHANGED = b'dc'
83 ACTION_CHANGED_DELETED = b'cd'
83 ACTION_CHANGED_DELETED = b'cd'
84 ACTION_MERGE = b'm'
84 ACTION_MERGE = b'm'
85 ACTION_LOCAL_DIR_RENAME_GET = b'dg'
85 ACTION_LOCAL_DIR_RENAME_GET = b'dg'
86 ACTION_DIR_RENAME_MOVE_LOCAL = b'dm'
86 ACTION_DIR_RENAME_MOVE_LOCAL = b'dm'
87 ACTION_KEEP = b'k'
87 ACTION_KEEP = b'k'
88 ACTION_EXEC = b'e'
88 ACTION_EXEC = b'e'
89 ACTION_CREATED_MERGE = b'cm'
89 ACTION_CREATED_MERGE = b'cm'
90
90
91 class mergestate(object):
91 class mergestate(object):
92 '''track 3-way merge state of individual files
92 '''track 3-way merge state of individual files
93
93
94 The merge state is stored on disk when needed. Two files are used: one with
94 The merge state is stored on disk when needed. Two files are used: one with
95 an old format (version 1), and one with a new format (version 2). Version 2
95 an old format (version 1), and one with a new format (version 2). Version 2
96 stores a superset of the data in version 1, including new kinds of records
96 stores a superset of the data in version 1, including new kinds of records
97 in the future. For more about the new format, see the documentation for
97 in the future. For more about the new format, see the documentation for
98 `_readrecordsv2`.
98 `_readrecordsv2`.
99
99
100 Each record can contain arbitrary content, and has an associated type. This
100 Each record can contain arbitrary content, and has an associated type. This
101 `type` should be a letter. If `type` is uppercase, the record is mandatory:
101 `type` should be a letter. If `type` is uppercase, the record is mandatory:
102 versions of Mercurial that don't support it should abort. If `type` is
102 versions of Mercurial that don't support it should abort. If `type` is
103 lowercase, the record can be safely ignored.
103 lowercase, the record can be safely ignored.
104
104
105 Currently known records:
105 Currently known records:
106
106
107 L: the node of the "local" part of the merge (hexified version)
107 L: the node of the "local" part of the merge (hexified version)
108 O: the node of the "other" part of the merge (hexified version)
108 O: the node of the "other" part of the merge (hexified version)
109 F: a file to be merged entry
109 F: a file to be merged entry
110 C: a change/delete or delete/change conflict
110 C: a change/delete or delete/change conflict
111 D: a file that the external merge driver will merge internally
111 D: a file that the external merge driver will merge internally
112 (experimental)
112 (experimental)
113 P: a path conflict (file vs directory)
113 P: a path conflict (file vs directory)
114 m: the external merge driver defined for this merge plus its run state
114 m: the external merge driver defined for this merge plus its run state
115 (experimental)
115 (experimental)
116 f: a (filename, dictionary) tuple of optional values for a given file
116 f: a (filename, dictionary) tuple of optional values for a given file
117 X: unsupported mandatory record type (used in tests)
117 X: unsupported mandatory record type (used in tests)
118 x: unsupported advisory record type (used in tests)
118 x: unsupported advisory record type (used in tests)
119 l: the labels for the parts of the merge.
119 l: the labels for the parts of the merge.
120
120
121 Merge driver run states (experimental):
121 Merge driver run states (experimental):
122 u: driver-resolved files unmarked -- needs to be run next time we're about
122 u: driver-resolved files unmarked -- needs to be run next time we're about
123 to resolve or commit
123 to resolve or commit
124 m: driver-resolved files marked -- only needs to be run before commit
124 m: driver-resolved files marked -- only needs to be run before commit
125 s: success/skipped -- does not need to be run any more
125 s: success/skipped -- does not need to be run any more
126
126
127 Merge record states (stored in self._state, indexed by filename):
127 Merge record states (stored in self._state, indexed by filename):
128 u: unresolved conflict
128 u: unresolved conflict
129 r: resolved conflict
129 r: resolved conflict
130 pu: unresolved path conflict (file conflicts with directory)
130 pu: unresolved path conflict (file conflicts with directory)
131 pr: resolved path conflict
131 pr: resolved path conflict
132 d: driver-resolved conflict
132 d: driver-resolved conflict
133
133
134 The resolve command transitions between 'u' and 'r' for conflicts and
134 The resolve command transitions between 'u' and 'r' for conflicts and
135 'pu' and 'pr' for path conflicts.
135 'pu' and 'pr' for path conflicts.
136 '''
136 '''
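A small sketch (illustration only) of the case convention described in the docstring above: uppercase record types are mandatory, lowercase ones are advisory and may be skipped by readers that do not understand them.

# Sketch: how a reader could honor the mandatory/advisory convention.
def _record_is_mandatory(rtype):
    # unknown uppercase types must cause an abort; lowercase types are ignorable
    return not rtype.islower()

assert _record_is_mandatory(b'X') and not _record_is_mandatory(b'x')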
137 statepathv1 = 'merge/state'
137 statepathv1 = 'merge/state'
138 statepathv2 = 'merge/state2'
138 statepathv2 = 'merge/state2'
139
139
140 @staticmethod
140 @staticmethod
141 def clean(repo, node=None, other=None, labels=None):
141 def clean(repo, node=None, other=None, labels=None):
142 """Initialize a brand new merge state, removing any existing state on
142 """Initialize a brand new merge state, removing any existing state on
143 disk."""
143 disk."""
144 ms = mergestate(repo)
144 ms = mergestate(repo)
145 ms.reset(node, other, labels)
145 ms.reset(node, other, labels)
146 return ms
146 return ms
147
147
148 @staticmethod
148 @staticmethod
149 def read(repo):
149 def read(repo):
150 """Initialize the merge state, reading it from disk."""
150 """Initialize the merge state, reading it from disk."""
151 ms = mergestate(repo)
151 ms = mergestate(repo)
152 ms._read()
152 ms._read()
153 return ms
153 return ms
154
154
155 def __init__(self, repo):
155 def __init__(self, repo):
156 """Initialize the merge state.
156 """Initialize the merge state.
157
157
158 Do not use this directly! Instead call read() or clean()."""
158 Do not use this directly! Instead call read() or clean()."""
159 self._repo = repo
159 self._repo = repo
160 self._dirty = False
160 self._dirty = False
161 self._labels = None
161 self._labels = None
162
162
163 def reset(self, node=None, other=None, labels=None):
163 def reset(self, node=None, other=None, labels=None):
164 self._state = {}
164 self._state = {}
165 self._stateextras = {}
165 self._stateextras = {}
166 self._local = None
166 self._local = None
167 self._other = None
167 self._other = None
168 self._labels = labels
168 self._labels = labels
169 for var in ('localctx', 'otherctx'):
169 for var in ('localctx', 'otherctx'):
170 if var in vars(self):
170 if var in vars(self):
171 delattr(self, var)
171 delattr(self, var)
172 if node:
172 if node:
173 self._local = node
173 self._local = node
174 self._other = other
174 self._other = other
175 self._readmergedriver = None
175 self._readmergedriver = None
176 if self.mergedriver:
176 if self.mergedriver:
177 self._mdstate = MERGE_DRIVER_STATE_SUCCESS
177 self._mdstate = MERGE_DRIVER_STATE_SUCCESS
178 else:
178 else:
179 self._mdstate = MERGE_DRIVER_STATE_UNMARKED
179 self._mdstate = MERGE_DRIVER_STATE_UNMARKED
180 shutil.rmtree(self._repo.vfs.join('merge'), True)
180 shutil.rmtree(self._repo.vfs.join('merge'), True)
181 self._results = {}
181 self._results = {}
182 self._dirty = False
182 self._dirty = False
183
183
184 def _read(self):
184 def _read(self):
185 """Analyse each record content to restore a serialized state from disk
185 """Analyse each record content to restore a serialized state from disk
186
186
187 This function process "record" entry produced by the de-serialization
187 This function process "record" entry produced by the de-serialization
188 of on disk file.
188 of on disk file.
189 """
189 """
190 self._state = {}
190 self._state = {}
191 self._stateextras = {}
191 self._stateextras = {}
192 self._local = None
192 self._local = None
193 self._other = None
193 self._other = None
194 for var in ('localctx', 'otherctx'):
194 for var in ('localctx', 'otherctx'):
195 if var in vars(self):
195 if var in vars(self):
196 delattr(self, var)
196 delattr(self, var)
197 self._readmergedriver = None
197 self._readmergedriver = None
198 self._mdstate = MERGE_DRIVER_STATE_SUCCESS
198 self._mdstate = MERGE_DRIVER_STATE_SUCCESS
199 unsupported = set()
199 unsupported = set()
200 records = self._readrecords()
200 records = self._readrecords()
201 for rtype, record in records:
201 for rtype, record in records:
202 if rtype == RECORD_LOCAL:
202 if rtype == RECORD_LOCAL:
203 self._local = bin(record)
203 self._local = bin(record)
204 elif rtype == RECORD_OTHER:
204 elif rtype == RECORD_OTHER:
205 self._other = bin(record)
205 self._other = bin(record)
206 elif rtype == RECORD_MERGE_DRIVER_STATE:
206 elif rtype == RECORD_MERGE_DRIVER_STATE:
207 bits = record.split('\0', 1)
207 bits = record.split('\0', 1)
208 mdstate = bits[1]
208 mdstate = bits[1]
209 if len(mdstate) != 1 or mdstate not in (
209 if len(mdstate) != 1 or mdstate not in (
210 MERGE_DRIVER_STATE_UNMARKED, MERGE_DRIVER_STATE_MARKED,
210 MERGE_DRIVER_STATE_UNMARKED, MERGE_DRIVER_STATE_MARKED,
211 MERGE_DRIVER_STATE_SUCCESS):
211 MERGE_DRIVER_STATE_SUCCESS):
212 # the merge driver should be idempotent, so just rerun it
212 # the merge driver should be idempotent, so just rerun it
213 mdstate = MERGE_DRIVER_STATE_UNMARKED
213 mdstate = MERGE_DRIVER_STATE_UNMARKED
214
214
215 self._readmergedriver = bits[0]
215 self._readmergedriver = bits[0]
216 self._mdstate = mdstate
216 self._mdstate = mdstate
217 elif rtype in (RECORD_MERGED, RECORD_CHANGEDELETE_CONFLICT,
217 elif rtype in (RECORD_MERGED, RECORD_CHANGEDELETE_CONFLICT,
218 RECORD_PATH_CONFLICT, RECORD_MERGE_DRIVER_MERGE):
218 RECORD_PATH_CONFLICT, RECORD_MERGE_DRIVER_MERGE):
219 bits = record.split('\0')
219 bits = record.split('\0')
220 self._state[bits[0]] = bits[1:]
220 self._state[bits[0]] = bits[1:]
221 elif rtype == RECORD_FILE_VALUES:
221 elif rtype == RECORD_FILE_VALUES:
222 filename, rawextras = record.split('\0', 1)
222 filename, rawextras = record.split('\0', 1)
223 extraparts = rawextras.split('\0')
223 extraparts = rawextras.split('\0')
224 extras = {}
224 extras = {}
225 i = 0
225 i = 0
226 while i < len(extraparts):
226 while i < len(extraparts):
227 extras[extraparts[i]] = extraparts[i + 1]
227 extras[extraparts[i]] = extraparts[i + 1]
228 i += 2
228 i += 2
229
229
230 self._stateextras[filename] = extras
230 self._stateextras[filename] = extras
231 elif rtype == RECORD_LABELS:
231 elif rtype == RECORD_LABELS:
232 labels = record.split('\0', 2)
232 labels = record.split('\0', 2)
233 self._labels = [l for l in labels if len(l) > 0]
233 self._labels = [l for l in labels if len(l) > 0]
234 elif not rtype.islower():
234 elif not rtype.islower():
235 unsupported.add(rtype)
235 unsupported.add(rtype)
236 self._results = {}
236 self._results = {}
237 self._dirty = False
237 self._dirty = False
238
238
239 if unsupported:
239 if unsupported:
240 raise error.UnsupportedMergeRecords(unsupported)
240 raise error.UnsupportedMergeRecords(unsupported)
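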
241
241
242 def _readrecords(self):
242 def _readrecords(self):
243 """Read merge state from disk and return a list of record (TYPE, data)
243 """Read merge state from disk and return a list of record (TYPE, data)
244
244
245 We read data from both v1 and v2 files and decide which one to use.
245 We read data from both v1 and v2 files and decide which one to use.
246
246
247 V1 has been used by version prior to 2.9.1 and contains less data than
247 V1 has been used by version prior to 2.9.1 and contains less data than
248 v2. We read both versions and check if no data in v2 contradicts
248 v2. We read both versions and check if no data in v2 contradicts
249 v1. If there is not contradiction we can safely assume that both v1
249 v1. If there is not contradiction we can safely assume that both v1
250 and v2 were written at the same time and use the extract data in v2. If
250 and v2 were written at the same time and use the extract data in v2. If
251 there is contradiction we ignore v2 content as we assume an old version
251 there is contradiction we ignore v2 content as we assume an old version
252 of Mercurial has overwritten the mergestate file and left an old v2
252 of Mercurial has overwritten the mergestate file and left an old v2
253 file around.
253 file around.
254
254
255 returns list of record [(TYPE, data), ...]"""
255 returns list of record [(TYPE, data), ...]"""
256 v1records = self._readrecordsv1()
256 v1records = self._readrecordsv1()
257 v2records = self._readrecordsv2()
257 v2records = self._readrecordsv2()
258 if self._v1v2match(v1records, v2records):
258 if self._v1v2match(v1records, v2records):
259 return v2records
259 return v2records
260 else:
260 else:
261 # v1 file is newer than v2 file, use it
261 # v1 file is newer than v2 file, use it
262 # we have to infer the "other" changeset of the merge
262 # we have to infer the "other" changeset of the merge
263 # we cannot do better than that with v1 of the format
263 # we cannot do better than that with v1 of the format
264 mctx = self._repo[None].parents()[-1]
264 mctx = self._repo[None].parents()[-1]
265 v1records.append((RECORD_OTHER, mctx.hex()))
265 v1records.append((RECORD_OTHER, mctx.hex()))
266 # add placeholder "other" file node information
267 # nobody is using it yet, so we do not need to fetch the data
268 # if mctx were wrong, `mctx[bits[-2]]` may fail.
269 for idx, r in enumerate(v1records):
269 for idx, r in enumerate(v1records):
270 if r[0] == RECORD_MERGED:
270 if r[0] == RECORD_MERGED:
271 bits = r[1].split('\0')
271 bits = r[1].split('\0')
272 bits.insert(-2, '')
272 bits.insert(-2, '')
273 v1records[idx] = (r[0], '\0'.join(bits))
273 v1records[idx] = (r[0], '\0'.join(bits))
274 return v1records
274 return v1records
275
275
276 def _v1v2match(self, v1records, v2records):
276 def _v1v2match(self, v1records, v2records):
277 oldv2 = set() # old format version of v2 record
277 oldv2 = set() # old format version of v2 record
278 for rec in v2records:
278 for rec in v2records:
279 if rec[0] == RECORD_LOCAL:
279 if rec[0] == RECORD_LOCAL:
280 oldv2.add(rec)
280 oldv2.add(rec)
281 elif rec[0] == RECORD_MERGED:
281 elif rec[0] == RECORD_MERGED:
282 # drop the onode data (not contained in v1)
282 # drop the onode data (not contained in v1)
283 oldv2.add((RECORD_MERGED, _droponode(rec[1])))
283 oldv2.add((RECORD_MERGED, _droponode(rec[1])))
284 for rec in v1records:
284 for rec in v1records:
285 if rec not in oldv2:
285 if rec not in oldv2:
286 return False
286 return False
287 else:
287 else:
288 return True
288 return True
289
289
290 def _readrecordsv1(self):
290 def _readrecordsv1(self):
291 """read on disk merge state for version 1 file
291 """read on disk merge state for version 1 file
292
292
293 returns list of record [(TYPE, data), ...]
293 returns list of record [(TYPE, data), ...]
294
294
295 Note: the "F" data from this file are one entry short
295 Note: the "F" data from this file are one entry short
296 (no "other file node" entry)
296 (no "other file node" entry)
297 """
297 """
298 records = []
298 records = []
299 try:
299 try:
300 f = self._repo.vfs(self.statepathv1)
300 f = self._repo.vfs(self.statepathv1)
301 for i, l in enumerate(f):
301 for i, l in enumerate(f):
302 if i == 0:
302 if i == 0:
303 records.append((RECORD_LOCAL, l[:-1]))
303 records.append((RECORD_LOCAL, l[:-1]))
304 else:
304 else:
305 records.append((RECORD_MERGED, l[:-1]))
305 records.append((RECORD_MERGED, l[:-1]))
306 f.close()
306 f.close()
307 except IOError as err:
307 except IOError as err:
308 if err.errno != errno.ENOENT:
308 if err.errno != errno.ENOENT:
309 raise
309 raise
310 return records
310 return records
311
311
312 def _readrecordsv2(self):
312 def _readrecordsv2(self):
313 """read on disk merge state for version 2 file
313 """read on disk merge state for version 2 file
314
314
315 This format is a list of arbitrary records of the form:
315 This format is a list of arbitrary records of the form:
316
316
317 [type][length][content]
317 [type][length][content]
318
318
319 `type` is a single character, `length` is a 4 byte integer, and
319 `type` is a single character, `length` is a 4 byte integer, and
320 `content` is an arbitrary byte sequence of length `length`.
320 `content` is an arbitrary byte sequence of length `length`.
321
321
322 Mercurial versions prior to 3.7 have a bug where if there are
322 Mercurial versions prior to 3.7 have a bug where if there are
323 unsupported mandatory merge records, attempting to clear out the merge
323 unsupported mandatory merge records, attempting to clear out the merge
324 state with hg update --clean or similar aborts. The 't' record type
324 state with hg update --clean or similar aborts. The 't' record type
325 works around that by writing out what those versions treat as an
325 works around that by writing out what those versions treat as an
326 advisory record, but later versions interpret as special: the first
326 advisory record, but later versions interpret as special: the first
327 character is the 'real' record type and everything onwards is the data.
327 character is the 'real' record type and everything onwards is the data.
328
328
329 Returns list of records [(TYPE, data), ...]."""
329 Returns list of records [(TYPE, data), ...]."""
330 records = []
330 records = []
331 try:
331 try:
332 f = self._repo.vfs(self.statepathv2)
332 f = self._repo.vfs(self.statepathv2)
333 data = f.read()
333 data = f.read()
334 off = 0
334 off = 0
335 end = len(data)
335 end = len(data)
336 while off < end:
336 while off < end:
337 rtype = data[off:off + 1]
337 rtype = data[off:off + 1]
338 off += 1
338 off += 1
339 length = _unpack('>I', data[off:(off + 4)])[0]
339 length = _unpack('>I', data[off:(off + 4)])[0]
340 off += 4
340 off += 4
341 record = data[off:(off + length)]
341 record = data[off:(off + length)]
342 off += length
342 off += length
343 if rtype == RECORD_OVERRIDE:
343 if rtype == RECORD_OVERRIDE:
344 rtype, record = record[0:1], record[1:]
344 rtype, record = record[0:1], record[1:]
345 records.append((rtype, record))
345 records.append((rtype, record))
346 f.close()
346 f.close()
347 except IOError as err:
347 except IOError as err:
348 if err.errno != errno.ENOENT:
348 if err.errno != errno.ENOENT:
349 raise
349 raise
350 return records
350 return records
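For illustration only: the [type][length][content] framing documented above can be round-tripped with the same struct format that _writerecordsv2 uses further down ('>sI%is'). The record values here are made up.

import struct

# Sketch: pack one advisory record, then slice it apart the way the loop above does.
rtype, data = b'l', b'local-label\0other-label'
blob = struct.pack('>sI%is' % len(data), rtype, len(data), data)
assert blob[0:1] == rtype                                  # 1-byte type
assert struct.unpack('>I', blob[1:5])[0] == len(data)      # 4-byte big-endian length
assert blob[5:5 + len(data)] == data                       # payload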
351
351
352 @util.propertycache
352 @util.propertycache
353 def mergedriver(self):
353 def mergedriver(self):
354 # protect against the following:
354 # protect against the following:
355 # - A configures a malicious merge driver in their hgrc, then
355 # - A configures a malicious merge driver in their hgrc, then
356 # pauses the merge
356 # pauses the merge
357 # - A edits their hgrc to remove references to the merge driver
357 # - A edits their hgrc to remove references to the merge driver
358 # - A gives a copy of their entire repo, including .hg, to B
358 # - A gives a copy of their entire repo, including .hg, to B
359 # - B inspects .hgrc and finds it to be clean
359 # - B inspects .hgrc and finds it to be clean
360 # - B then continues the merge and the malicious merge driver
360 # - B then continues the merge and the malicious merge driver
361 # gets invoked
361 # gets invoked
362 configmergedriver = self._repo.ui.config('experimental', 'mergedriver')
362 configmergedriver = self._repo.ui.config('experimental', 'mergedriver')
363 if (self._readmergedriver is not None
363 if (self._readmergedriver is not None
364 and self._readmergedriver != configmergedriver):
364 and self._readmergedriver != configmergedriver):
365 raise error.ConfigError(
365 raise error.ConfigError(
366 _("merge driver changed since merge started"),
366 _("merge driver changed since merge started"),
367 hint=_("revert merge driver change or abort merge"))
367 hint=_("revert merge driver change or abort merge"))
368
368
369 return configmergedriver
369 return configmergedriver
370
370
371 @util.propertycache
371 @util.propertycache
372 def localctx(self):
372 def localctx(self):
373 if self._local is None:
373 if self._local is None:
374 msg = "localctx accessed but self._local isn't set"
374 msg = "localctx accessed but self._local isn't set"
375 raise error.ProgrammingError(msg)
375 raise error.ProgrammingError(msg)
376 return self._repo[self._local]
376 return self._repo[self._local]
377
377
378 @util.propertycache
378 @util.propertycache
379 def otherctx(self):
379 def otherctx(self):
380 if self._other is None:
380 if self._other is None:
381 msg = "otherctx accessed but self._other isn't set"
381 msg = "otherctx accessed but self._other isn't set"
382 raise error.ProgrammingError(msg)
382 raise error.ProgrammingError(msg)
383 return self._repo[self._other]
383 return self._repo[self._other]
384
384
385 def active(self):
385 def active(self):
386 """Whether mergestate is active.
386 """Whether mergestate is active.
387
387
388 Returns True if there appears to be mergestate. This is a rough proxy
388 Returns True if there appears to be mergestate. This is a rough proxy
389 for "is a merge in progress."
389 for "is a merge in progress."
390 """
390 """
391 # Check local variables before looking at filesystem for performance
391 # Check local variables before looking at filesystem for performance
392 # reasons.
392 # reasons.
393 return bool(self._local) or bool(self._state) or \
393 return bool(self._local) or bool(self._state) or \
394 self._repo.vfs.exists(self.statepathv1) or \
394 self._repo.vfs.exists(self.statepathv1) or \
395 self._repo.vfs.exists(self.statepathv2)
395 self._repo.vfs.exists(self.statepathv2)
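A hedged usage sketch (the repo variable is assumed; this mirrors how callers elsewhere in Mercurial typically consult the merge state, and is not part of this change):

# Sketch: detect an in-progress merge and report unresolved files.
ms = mergestate.read(repo)
if ms.active():
    for f in ms.unresolved():
        repo.ui.warn('unresolved: %s\n' % f)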
396
396
397 def commit(self):
397 def commit(self):
398 """Write current state on disk (if necessary)"""
398 """Write current state on disk (if necessary)"""
399 if self._dirty:
399 if self._dirty:
400 records = self._makerecords()
400 records = self._makerecords()
401 self._writerecords(records)
401 self._writerecords(records)
402 self._dirty = False
402 self._dirty = False
403
403
404 def _makerecords(self):
404 def _makerecords(self):
405 records = []
405 records = []
406 records.append((RECORD_LOCAL, hex(self._local)))
406 records.append((RECORD_LOCAL, hex(self._local)))
407 records.append((RECORD_OTHER, hex(self._other)))
407 records.append((RECORD_OTHER, hex(self._other)))
408 if self.mergedriver:
408 if self.mergedriver:
409 records.append((RECORD_MERGE_DRIVER_STATE, '\0'.join([
409 records.append((RECORD_MERGE_DRIVER_STATE, '\0'.join([
410 self.mergedriver, self._mdstate])))
410 self.mergedriver, self._mdstate])))
411 # Write out state items. In all cases, the value of the state map entry
411 # Write out state items. In all cases, the value of the state map entry
412 # is written as the contents of the record. The record type depends on
412 # is written as the contents of the record. The record type depends on
413 # the type of state that is stored, and capital-letter records are used
413 # the type of state that is stored, and capital-letter records are used
414 # to prevent older versions of Mercurial that do not support the feature
414 # to prevent older versions of Mercurial that do not support the feature
415 # from loading them.
415 # from loading them.
416 for filename, v in self._state.iteritems():
416 for filename, v in self._state.iteritems():
417 if v[0] == MERGE_RECORD_DRIVER_RESOLVED:
417 if v[0] == MERGE_RECORD_DRIVER_RESOLVED:
418 # Driver-resolved merge. These are stored in 'D' records.
418 # Driver-resolved merge. These are stored in 'D' records.
419 records.append((RECORD_MERGE_DRIVER_MERGE,
419 records.append((RECORD_MERGE_DRIVER_MERGE,
420 '\0'.join([filename] + v)))
420 '\0'.join([filename] + v)))
421 elif v[0] in (MERGE_RECORD_UNRESOLVED_PATH,
421 elif v[0] in (MERGE_RECORD_UNRESOLVED_PATH,
422 MERGE_RECORD_RESOLVED_PATH):
422 MERGE_RECORD_RESOLVED_PATH):
423 # Path conflicts. These are stored in 'P' records. The current
423 # Path conflicts. These are stored in 'P' records. The current
424 # resolution state ('pu' or 'pr') is stored within the record.
424 # resolution state ('pu' or 'pr') is stored within the record.
425 records.append((RECORD_PATH_CONFLICT,
425 records.append((RECORD_PATH_CONFLICT,
426 '\0'.join([filename] + v)))
426 '\0'.join([filename] + v)))
427 elif v[1] == nullhex or v[6] == nullhex:
427 elif v[1] == nullhex or v[6] == nullhex:
428 # Change/Delete or Delete/Change conflicts. These are stored in
428 # Change/Delete or Delete/Change conflicts. These are stored in
429 # 'C' records. v[1] is the local file, and is nullhex when the
429 # 'C' records. v[1] is the local file, and is nullhex when the
430 # file is deleted locally ('dc'). v[6] is the remote file, and
430 # file is deleted locally ('dc'). v[6] is the remote file, and
431 # is nullhex when the file is deleted remotely ('cd').
431 # is nullhex when the file is deleted remotely ('cd').
432 records.append((RECORD_CHANGEDELETE_CONFLICT,
432 records.append((RECORD_CHANGEDELETE_CONFLICT,
433 '\0'.join([filename] + v)))
433 '\0'.join([filename] + v)))
434 else:
434 else:
435 # Normal files. These are stored in 'F' records.
435 # Normal files. These are stored in 'F' records.
436 records.append((RECORD_MERGED,
436 records.append((RECORD_MERGED,
437 '\0'.join([filename] + v)))
437 '\0'.join([filename] + v)))
438 for filename, extras in sorted(self._stateextras.iteritems()):
438 for filename, extras in sorted(self._stateextras.iteritems()):
439 rawextras = '\0'.join('%s\0%s' % (k, v) for k, v in
439 rawextras = '\0'.join('%s\0%s' % (k, v) for k, v in
440 extras.iteritems())
440 extras.iteritems())
441 records.append((RECORD_FILE_VALUES,
441 records.append((RECORD_FILE_VALUES,
442 '%s\0%s' % (filename, rawextras)))
442 '%s\0%s' % (filename, rawextras)))
443 if self._labels is not None:
443 if self._labels is not None:
444 labels = '\0'.join(self._labels)
444 labels = '\0'.join(self._labels)
445 records.append((RECORD_LABELS, labels))
445 records.append((RECORD_LABELS, labels))
446 return records
446 return records
447
447
448 def _writerecords(self, records):
448 def _writerecords(self, records):
449 """Write current state on disk (both v1 and v2)"""
449 """Write current state on disk (both v1 and v2)"""
450 self._writerecordsv1(records)
450 self._writerecordsv1(records)
451 self._writerecordsv2(records)
451 self._writerecordsv2(records)
452
452
453 def _writerecordsv1(self, records):
453 def _writerecordsv1(self, records):
454 """Write current state on disk in a version 1 file"""
454 """Write current state on disk in a version 1 file"""
455 f = self._repo.vfs(self.statepathv1, 'wb')
455 f = self._repo.vfs(self.statepathv1, 'wb')
456 irecords = iter(records)
456 irecords = iter(records)
457 lrecords = next(irecords)
457 lrecords = next(irecords)
458 assert lrecords[0] == RECORD_LOCAL
458 assert lrecords[0] == RECORD_LOCAL
459 f.write(hex(self._local) + '\n')
459 f.write(hex(self._local) + '\n')
460 for rtype, data in irecords:
460 for rtype, data in irecords:
461 if rtype == RECORD_MERGED:
461 if rtype == RECORD_MERGED:
462 f.write('%s\n' % _droponode(data))
462 f.write('%s\n' % _droponode(data))
463 f.close()
463 f.close()
464
464
465 def _writerecordsv2(self, records):
465 def _writerecordsv2(self, records):
466 """Write current state on disk in a version 2 file
466 """Write current state on disk in a version 2 file
467
467
468 See the docstring for _readrecordsv2 for why we use 't'."""
468 See the docstring for _readrecordsv2 for why we use 't'."""
469 # these are the records that all version 2 clients can read
469 # these are the records that all version 2 clients can read
470 allowlist = (RECORD_LOCAL, RECORD_OTHER, RECORD_MERGED)
470 allowlist = (RECORD_LOCAL, RECORD_OTHER, RECORD_MERGED)
471 f = self._repo.vfs(self.statepathv2, 'wb')
471 f = self._repo.vfs(self.statepathv2, 'wb')
472 for key, data in records:
472 for key, data in records:
473 assert len(key) == 1
473 assert len(key) == 1
474 if key not in allowlist:
474 if key not in allowlist:
475 key, data = RECORD_OVERRIDE, '%s%s' % (key, data)
475 key, data = RECORD_OVERRIDE, '%s%s' % (key, data)
476 format = '>sI%is' % len(data)
476 format = '>sI%is' % len(data)
477 f.write(_pack(format, key, len(data), data))
477 f.write(_pack(format, key, len(data), data))
478 f.close()
478 f.close()
479
479
480 def add(self, fcl, fco, fca, fd):
480 def add(self, fcl, fco, fca, fd):
481 """add a new (potentially?) conflicting file the merge state
481 """add a new (potentially?) conflicting file the merge state
482 fcl: file context for local,
482 fcl: file context for local,
483 fco: file context for remote,
483 fco: file context for remote,
484 fca: file context for ancestors,
484 fca: file context for ancestors,
485 fd: file path of the resulting merge.
485 fd: file path of the resulting merge.
486
486
487 note: also write the local version to the `.hg/merge` directory.
487 note: also write the local version to the `.hg/merge` directory.
488 """
488 """
489 if fcl.isabsent():
489 if fcl.isabsent():
490 hash = nullhex
490 hash = nullhex
491 else:
491 else:
492 hash = hex(hashlib.sha1(fcl.path()).digest())
492 hash = hex(hashlib.sha1(fcl.path()).digest())
493 self._repo.vfs.write('merge/' + hash, fcl.data())
493 self._repo.vfs.write('merge/' + hash, fcl.data())
494 self._state[fd] = [MERGE_RECORD_UNRESOLVED, hash, fcl.path(),
494 self._state[fd] = [MERGE_RECORD_UNRESOLVED, hash, fcl.path(),
495 fca.path(), hex(fca.filenode()),
495 fca.path(), hex(fca.filenode()),
496 fco.path(), hex(fco.filenode()),
496 fco.path(), hex(fco.filenode()),
497 fcl.flags()]
497 fcl.flags()]
498 self._stateextras[fd] = {'ancestorlinknode': hex(fca.node())}
498 self._stateextras[fd] = {'ancestorlinknode': hex(fca.node())}
499 self._dirty = True
499 self._dirty = True
500
500
501 def addpath(self, path, frename, forigin):
501 def addpath(self, path, frename, forigin):
502 """add a new conflicting path to the merge state
502 """add a new conflicting path to the merge state
503 path: the path that conflicts
503 path: the path that conflicts
504 frename: the filename the conflicting file was renamed to
504 frename: the filename the conflicting file was renamed to
505 forigin: origin of the file ('l' or 'r' for local/remote)
505 forigin: origin of the file ('l' or 'r' for local/remote)
506 """
506 """
507 self._state[path] = [MERGE_RECORD_UNRESOLVED_PATH, frename, forigin]
507 self._state[path] = [MERGE_RECORD_UNRESOLVED_PATH, frename, forigin]
508 self._dirty = True
508 self._dirty = True
509
509
510 def __contains__(self, dfile):
510 def __contains__(self, dfile):
511 return dfile in self._state
511 return dfile in self._state
512
512
513 def __getitem__(self, dfile):
513 def __getitem__(self, dfile):
514 return self._state[dfile][0]
514 return self._state[dfile][0]
515
515
516 def __iter__(self):
516 def __iter__(self):
517 return iter(sorted(self._state))
517 return iter(sorted(self._state))
518
518
519 def files(self):
519 def files(self):
520 return self._state.keys()
520 return self._state.keys()
521
521
522 def mark(self, dfile, state):
522 def mark(self, dfile, state):
523 self._state[dfile][0] = state
523 self._state[dfile][0] = state
524 self._dirty = True
524 self._dirty = True
525
525
526 def mdstate(self):
526 def mdstate(self):
527 return self._mdstate
527 return self._mdstate
528
528
529 def unresolved(self):
529 def unresolved(self):
530 """Obtain the paths of unresolved files."""
530 """Obtain the paths of unresolved files."""
531
531
532 for f, entry in self._state.iteritems():
532 for f, entry in self._state.iteritems():
533 if entry[0] in (MERGE_RECORD_UNRESOLVED,
533 if entry[0] in (MERGE_RECORD_UNRESOLVED,
534 MERGE_RECORD_UNRESOLVED_PATH):
534 MERGE_RECORD_UNRESOLVED_PATH):
535 yield f
535 yield f
536
536
537 def driverresolved(self):
537 def driverresolved(self):
538 """Obtain the paths of driver-resolved files."""
538 """Obtain the paths of driver-resolved files."""
539
539
540 for f, entry in self._state.items():
540 for f, entry in self._state.items():
541 if entry[0] == MERGE_RECORD_DRIVER_RESOLVED:
541 if entry[0] == MERGE_RECORD_DRIVER_RESOLVED:
542 yield f
542 yield f
543
543
544 def extras(self, filename):
544 def extras(self, filename):
545 return self._stateextras.setdefault(filename, {})
545 return self._stateextras.setdefault(filename, {})
546
546
547 def _resolve(self, preresolve, dfile, wctx):
547 def _resolve(self, preresolve, dfile, wctx):
548 """rerun merge process for file path `dfile`"""
548 """rerun merge process for file path `dfile`"""
549 if self[dfile] in (MERGE_RECORD_RESOLVED,
549 if self[dfile] in (MERGE_RECORD_RESOLVED,
550 MERGE_RECORD_DRIVER_RESOLVED):
550 MERGE_RECORD_DRIVER_RESOLVED):
551 return True, 0
551 return True, 0
552 stateentry = self._state[dfile]
552 stateentry = self._state[dfile]
553 state, hash, lfile, afile, anode, ofile, onode, flags = stateentry
553 state, hash, lfile, afile, anode, ofile, onode, flags = stateentry
554 octx = self._repo[self._other]
554 octx = self._repo[self._other]
555 extras = self.extras(dfile)
555 extras = self.extras(dfile)
556 anccommitnode = extras.get('ancestorlinknode')
556 anccommitnode = extras.get('ancestorlinknode')
557 if anccommitnode:
557 if anccommitnode:
558 actx = self._repo[anccommitnode]
558 actx = self._repo[anccommitnode]
559 else:
559 else:
560 actx = None
560 actx = None
561 fcd = self._filectxorabsent(hash, wctx, dfile)
561 fcd = self._filectxorabsent(hash, wctx, dfile)
562 fco = self._filectxorabsent(onode, octx, ofile)
562 fco = self._filectxorabsent(onode, octx, ofile)
563 # TODO: move this to filectxorabsent
563 # TODO: move this to filectxorabsent
564 fca = self._repo.filectx(afile, fileid=anode, changectx=actx)
564 fca = self._repo.filectx(afile, fileid=anode, changectx=actx)
565 # "premerge" x flags
565 # "premerge" x flags
566 flo = fco.flags()
566 flo = fco.flags()
567 fla = fca.flags()
567 fla = fca.flags()
568 if 'x' in flags + flo + fla and 'l' not in flags + flo + fla:
568 if 'x' in flags + flo + fla and 'l' not in flags + flo + fla:
569 if fca.node() == nullid and flags != flo:
569 if fca.node() == nullid and flags != flo:
570 if preresolve:
570 if preresolve:
571 self._repo.ui.warn(
571 self._repo.ui.warn(
572 _('warning: cannot merge flags for %s '
572 _('warning: cannot merge flags for %s '
573 'without common ancestor - keeping local flags\n')
573 'without common ancestor - keeping local flags\n')
574 % afile)
574 % afile)
575 elif flags == fla:
575 elif flags == fla:
576 flags = flo
576 flags = flo
577 if preresolve:
577 if preresolve:
578 # restore local
578 # restore local
579 if hash != nullhex:
579 if hash != nullhex:
580 f = self._repo.vfs('merge/' + hash)
580 f = self._repo.vfs('merge/' + hash)
581 wctx[dfile].write(f.read(), flags)
581 wctx[dfile].write(f.read(), flags)
582 f.close()
582 f.close()
583 else:
583 else:
584 wctx[dfile].remove(ignoremissing=True)
584 wctx[dfile].remove(ignoremissing=True)
585 complete, r, deleted = filemerge.premerge(self._repo, wctx,
585 complete, r, deleted = filemerge.premerge(self._repo, wctx,
586 self._local, lfile, fcd,
586 self._local, lfile, fcd,
587 fco, fca,
587 fco, fca,
588 labels=self._labels)
588 labels=self._labels)
589 else:
589 else:
590 complete, r, deleted = filemerge.filemerge(self._repo, wctx,
590 complete, r, deleted = filemerge.filemerge(self._repo, wctx,
591 self._local, lfile, fcd,
591 self._local, lfile, fcd,
592 fco, fca,
592 fco, fca,
593 labels=self._labels)
593 labels=self._labels)
594 if r is None:
594 if r is None:
595 # no real conflict
595 # no real conflict
596 del self._state[dfile]
596 del self._state[dfile]
597 self._stateextras.pop(dfile, None)
597 self._stateextras.pop(dfile, None)
598 self._dirty = True
598 self._dirty = True
599 elif not r:
599 elif not r:
600 self.mark(dfile, MERGE_RECORD_RESOLVED)
600 self.mark(dfile, MERGE_RECORD_RESOLVED)
601
601
602 if complete:
602 if complete:
603 action = None
603 action = None
604 if deleted:
604 if deleted:
605 if fcd.isabsent():
605 if fcd.isabsent():
606 # dc: local picked. Need to drop if present, which may
606 # dc: local picked. Need to drop if present, which may
607 # happen on re-resolves.
607 # happen on re-resolves.
608 action = ACTION_FORGET
608 action = ACTION_FORGET
609 else:
609 else:
610 # cd: remote picked (or otherwise deleted)
610 # cd: remote picked (or otherwise deleted)
611 action = ACTION_REMOVE
611 action = ACTION_REMOVE
612 else:
612 else:
613 if fcd.isabsent(): # dc: remote picked
613 if fcd.isabsent(): # dc: remote picked
614 action = ACTION_GET
614 action = ACTION_GET
615 elif fco.isabsent(): # cd: local picked
615 elif fco.isabsent(): # cd: local picked
616 if dfile in self.localctx:
616 if dfile in self.localctx:
617 action = ACTION_ADD_MODIFIED
617 action = ACTION_ADD_MODIFIED
618 else:
618 else:
619 action = ACTION_ADD
619 action = ACTION_ADD
620 # else: regular merges (no action necessary)
620 # else: regular merges (no action necessary)
621 self._results[dfile] = r, action
621 self._results[dfile] = r, action
622
622
623 return complete, r
623 return complete, r
624
624
625 def _filectxorabsent(self, hexnode, ctx, f):
625 def _filectxorabsent(self, hexnode, ctx, f):
626 if hexnode == nullhex:
626 if hexnode == nullhex:
627 return filemerge.absentfilectx(ctx, f)
627 return filemerge.absentfilectx(ctx, f)
628 else:
628 else:
629 return ctx[f]
629 return ctx[f]
630
630
631 def preresolve(self, dfile, wctx):
631 def preresolve(self, dfile, wctx):
632 """run premerge process for dfile
632 """run premerge process for dfile
633
633
634 Returns whether the merge is complete, and the exit code."""
634 Returns whether the merge is complete, and the exit code."""
635 return self._resolve(True, dfile, wctx)
635 return self._resolve(True, dfile, wctx)
636
636
637 def resolve(self, dfile, wctx):
637 def resolve(self, dfile, wctx):
638 """run merge process (assuming premerge was run) for dfile
638 """run merge process (assuming premerge was run) for dfile
639
639
640 Returns the exit code of the merge."""
640 Returns the exit code of the merge."""
641 return self._resolve(False, dfile, wctx)[1]
641 return self._resolve(False, dfile, wctx)[1]
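For illustration, a sketch of the two-phase flow the docstrings above describe (ms, f and wctx are assumed; real callers live in the resolve machinery and are not shown here):

# Sketch: premerge first, fall back to the merge tool, then persist the state.
complete, r = ms.preresolve(f, wctx)
if not complete:
    r = ms.resolve(f, wctx)
ms.commit()   # writes merge/state and merge/state2 only if dirty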
642
642
643 def counts(self):
643 def counts(self):
644 """return counts for updated, merged and removed files in this
644 """return counts for updated, merged and removed files in this
645 session"""
645 session"""
646 updated, merged, removed = 0, 0, 0
646 updated, merged, removed = 0, 0, 0
647 for r, action in self._results.itervalues():
647 for r, action in self._results.itervalues():
648 if r is None:
648 if r is None:
649 updated += 1
649 updated += 1
650 elif r == 0:
650 elif r == 0:
651 if action == ACTION_REMOVE:
651 if action == ACTION_REMOVE:
652 removed += 1
652 removed += 1
653 else:
653 else:
654 merged += 1
654 merged += 1
655 return updated, merged, removed
655 return updated, merged, removed
656
656
657 def unresolvedcount(self):
657 def unresolvedcount(self):
658 """get unresolved count for this merge (persistent)"""
658 """get unresolved count for this merge (persistent)"""
659 return len(list(self.unresolved()))
659 return len(list(self.unresolved()))
660
660
661 def actions(self):
661 def actions(self):
662 """return lists of actions to perform on the dirstate"""
662 """return lists of actions to perform on the dirstate"""
663 actions = {
663 actions = {
664 ACTION_REMOVE: [],
664 ACTION_REMOVE: [],
665 ACTION_FORGET: [],
665 ACTION_FORGET: [],
666 ACTION_ADD: [],
666 ACTION_ADD: [],
667 ACTION_ADD_MODIFIED: [],
667 ACTION_ADD_MODIFIED: [],
668 ACTION_GET: [],
668 ACTION_GET: [],
669 }
669 }
670 for f, (r, action) in self._results.iteritems():
670 for f, (r, action) in self._results.iteritems():
671 if action is not None:
671 if action is not None:
672 actions[action].append((f, None, "merge result"))
672 actions[action].append((f, None, "merge result"))
673 return actions
673 return actions
674
674
675 def recordactions(self):
675 def recordactions(self):
676 """record remove/add/get actions in the dirstate"""
676 """record remove/add/get actions in the dirstate"""
677 branchmerge = self._repo.dirstate.p2() != nullid
677 branchmerge = self._repo.dirstate.p2() != nullid
678 recordupdates(self._repo, self.actions(), branchmerge)
678 recordupdates(self._repo, self.actions(), branchmerge)
679
679
680 def queueremove(self, f):
680 def queueremove(self, f):
681 """queues a file to be removed from the dirstate
681 """queues a file to be removed from the dirstate
682
682
683 Meant for use by custom merge drivers."""
683 Meant for use by custom merge drivers."""
684 self._results[f] = 0, ACTION_REMOVE
684 self._results[f] = 0, ACTION_REMOVE
685
685
686 def queueadd(self, f):
686 def queueadd(self, f):
687 """queues a file to be added to the dirstate
687 """queues a file to be added to the dirstate
688
688
689 Meant for use by custom merge drivers."""
689 Meant for use by custom merge drivers."""
690 self._results[f] = 0, ACTION_ADD
690 self._results[f] = 0, ACTION_ADD
691
691
692 def queueget(self, f):
692 def queueget(self, f):
693 """queues a file to be marked modified in the dirstate
693 """queues a file to be marked modified in the dirstate
694
694
695 Meant for use by custom merge drivers."""
695 Meant for use by custom merge drivers."""
696 self._results[f] = 0, ACTION_GET
696 self._results[f] = 0, ACTION_GET
697
697
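# The merge.checkunknown and merge.checkignored settings read below are
# expected to be one of 'abort', 'ignore' or 'warn'; any other value makes
# _getcheckunknownconfig raise a ConfigError.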
def _getcheckunknownconfig(repo, section, name):
    config = repo.ui.config(section, name)
    valid = ['abort', 'ignore', 'warn']
    if config not in valid:
        validstr = ', '.join(["'" + v + "'" for v in valid])
        raise error.ConfigError(_("%s.%s not valid "
                                  "('%s' is none of %s)")
                                % (section, name, config, validstr))
    return config

def _checkunknownfile(repo, wctx, mctx, f, f2=None):
    if wctx.isinmemory():
        # Nothing to do in IMM because nothing in the "working copy" can be an
        # unknown file.
        #
        # Note that we should bail out here, not in ``_checkunknownfiles()``,
        # because that function does other useful work.
        return False

    if f2 is None:
        f2 = f
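    # A conflict is reported only if the path passes the audit check, exists
    # in the working directory as a file or symlink, is not tracked in the
    # dirstate, and its content differs from what the merge wants to write.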
    return (repo.wvfs.audit.check(f)
            and repo.wvfs.isfileorlink(f)
            and repo.dirstate.normalize(f) not in repo.dirstate
            and mctx[f2].cmp(wctx[f]))

class _unknowndirschecker(object):
    """
    Look for any unknown files or directories that may have a path conflict
    with a file. If any path prefix of the file exists as a file or link,
    then it conflicts. If the file itself is a directory that contains any
    file that is not tracked, then it conflicts.

    Returns the shortest path at which a conflict occurs, or None if there is
    no conflict.
    """
    def __init__(self):
        # A set of paths known to be good. This prevents repeated checking of
        # dirs. It will be updated with any new dirs that are checked and found
        # to be safe.
        self._unknowndircache = set()

        # A set of paths that are known to be absent. This prevents repeated
        # checking of subdirectories that are known not to exist. It will be
        # updated with any new dirs that are checked and found to be absent.
        self._missingdircache = set()

    def __call__(self, repo, wctx, f):
        if wctx.isinmemory():
            # Nothing to do in IMM for the same reason as ``_checkunknownfile``.
            return False

        # Check for path prefixes that exist as unknown files.
        for p in reversed(list(util.finddirs(f))):
            if p in self._missingdircache:
                return
            if p in self._unknowndircache:
                continue
            if repo.wvfs.audit.check(p):
                if (repo.wvfs.isfileorlink(p)
                        and repo.dirstate.normalize(p) not in repo.dirstate):
                    return p
                if not repo.wvfs.lexists(p):
                    self._missingdircache.add(p)
                    return
                self._unknowndircache.add(p)

        # Check if the file conflicts with a directory containing unknown files.
        if repo.wvfs.audit.check(f) and repo.wvfs.isdir(f):
            # Does the directory contain any files that are not in the dirstate?
            for p, dirs, files in repo.wvfs.walk(f):
                for fn in files:
                    relf = util.pconvert(repo.wvfs.reljoin(p, fn))
                    relf = repo.dirstate.normalize(relf, isknown=True)
                    if relf not in repo.dirstate:
                        return f
        return None

def _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce):
    """
    Considers any actions that care about the presence of conflicting unknown
    files. For some actions, the result is to abort; for others, it is to
    choose a different action.
    """
    fileconflicts = set()
    pathconflicts = set()
    warnconflicts = set()
    abortconflicts = set()
    unknownconfig = _getcheckunknownconfig(repo, 'merge', 'checkunknown')
    ignoredconfig = _getcheckunknownconfig(repo, 'merge', 'checkignored')
    pathconfig = repo.ui.configbool('experimental', 'merge.checkpathconflicts')
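    # checkunknown applies to conflicting files that are neither tracked nor
    # ignored; checkignored applies to conflicting files matched by the ignore
    # rules. Both accept 'abort', 'ignore' or 'warn'.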
    if not force:
        def collectconflicts(conflicts, config):
            if config == 'abort':
                abortconflicts.update(conflicts)
            elif config == 'warn':
                warnconflicts.update(conflicts)

        checkunknowndirs = _unknowndirschecker()
        for f, (m, args, msg) in actions.iteritems():
            if m in (ACTION_CREATED, ACTION_DELETED_CHANGED):
                if _checkunknownfile(repo, wctx, mctx, f):
                    fileconflicts.add(f)
                elif pathconfig and f not in wctx:
                    path = checkunknowndirs(repo, wctx, f)
                    if path is not None:
                        pathconflicts.add(path)
            elif m == ACTION_LOCAL_DIR_RENAME_GET:
                if _checkunknownfile(repo, wctx, mctx, f, args[0]):
                    fileconflicts.add(f)

        allconflicts = fileconflicts | pathconflicts
        ignoredconflicts = set([c for c in allconflicts
                                if repo.dirstate._ignore(c)])
        unknownconflicts = allconflicts - ignoredconflicts
        collectconflicts(ignoredconflicts, ignoredconfig)
        collectconflicts(unknownconflicts, unknownconfig)
    else:
        for f, (m, args, msg) in actions.iteritems():
            if m == ACTION_CREATED_MERGE:
                fl2, anc = args
                different = _checkunknownfile(repo, wctx, mctx, f)
                if repo.dirstate._ignore(f):
                    config = ignoredconfig
                else:
                    config = unknownconfig

                # The behavior when force is True is described by this table:
                #  config  different  mergeforce  |    action    backup
                #    *         n          *       |      get        n
                #    *         y          y       |     merge       -
                #   abort      y          n       |     merge       -   (1)
                #   warn       y          n       |  warn + get     y
                #  ignore      y          n       |      get        y
                #
                # (1) this is probably the wrong behavior here -- we should
                #     probably abort, but some actions like rebases currently
                #     don't like an abort happening in the middle of
                #     merge.update.
                if not different:
                    actions[f] = (ACTION_GET, (fl2, False), 'remote created')
                elif mergeforce or config == 'abort':
                    actions[f] = (ACTION_MERGE, (f, f, None, False, anc),
                                  'remote differs from untracked local')
                elif config == 'abort':
                    abortconflicts.add(f)
                else:
                    if config == 'warn':
                        warnconflicts.add(f)
                    actions[f] = (ACTION_GET, (fl2, True), 'remote created')

    for f in sorted(abortconflicts):
        warn = repo.ui.warn
        if f in pathconflicts:
            if repo.wvfs.isfileorlink(f):
                warn(_("%s: untracked file conflicts with directory\n") % f)
            else:
                warn(_("%s: untracked directory conflicts with file\n") % f)
        else:
            warn(_("%s: untracked file differs\n") % f)
    if abortconflicts:
        raise error.Abort(_("untracked files in working directory "
                            "differ from files in requested revision"))

    for f in sorted(warnconflicts):
        if repo.wvfs.isfileorlink(f):
            repo.ui.warn(_("%s: replacing untracked file\n") % f)
        else:
            repo.ui.warn(_("%s: replacing untracked files in directory\n") % f)

    for f, (m, args, msg) in actions.iteritems():
        if m == ACTION_CREATED:
            backup = (f in fileconflicts or f in pathconflicts or
                      any(p in pathconflicts for p in util.finddirs(f)))
            flags, = args
            actions[f] = (ACTION_GET, (flags, backup), msg)

def _forgetremoved(wctx, mctx, branchmerge):
    """
    Forget removed files

    If we're jumping between revisions (as opposed to merging), and if
    neither the working directory nor the target rev has the file,
    then we need to remove it from the dirstate, to prevent the
    dirstate from listing the file when it is no longer in the
    manifest.

    If we're merging, and the other revision has removed a file
    that is not present in the working directory, we need to mark it
    as removed.
    """

    actions = {}
    m = ACTION_FORGET
    if branchmerge:
        m = ACTION_REMOVE
    for f in wctx.deleted():
        if f not in mctx:
            actions[f] = m, None, "forget deleted"

    if not branchmerge:
        for f in wctx.removed():
            if f not in mctx:
                actions[f] = ACTION_FORGET, None, "forget removed"

    return actions

def _checkcollision(repo, wmf, actions):
    """
    Check for case-folding collisions.
    """

    # If the repo is narrowed, filter out files outside the narrowspec.
    narrowmatch = repo.narrowmatch()
    if not narrowmatch.always():
        wmf = wmf.matches(narrowmatch)
        if actions:
            narrowactions = {}
            for m, actionsfortype in actions.iteritems():
                narrowactions[m] = []
                for (f, args, msg) in actionsfortype:
                    if narrowmatch(f):
                        narrowactions[m].append((f, args, msg))
            actions = narrowactions

    # build provisional merged manifest up
    pmmf = set(wmf)

    if actions:
        # KEEP and EXEC are no-op
        for m in (ACTION_ADD, ACTION_ADD_MODIFIED, ACTION_FORGET, ACTION_GET,
                  ACTION_CHANGED_DELETED, ACTION_DELETED_CHANGED):
            for f, args, msg in actions[m]:
                pmmf.add(f)
        for f, args, msg in actions[ACTION_REMOVE]:
            pmmf.discard(f)
        for f, args, msg in actions[ACTION_DIR_RENAME_MOVE_LOCAL]:
            f2, flags = args
            pmmf.discard(f2)
            pmmf.add(f)
        for f, args, msg in actions[ACTION_LOCAL_DIR_RENAME_GET]:
            pmmf.add(f)
        for f, args, msg in actions[ACTION_MERGE]:
            f1, f2, fa, move, anc = args
            if move:
                pmmf.discard(f1)
            pmmf.add(f)

    # check case-folding collision in provisional merged manifest
    foldmap = {}
    for f in pmmf:
        fold = util.normcase(f)
        if fold in foldmap:
            raise error.Abort(_("case-folding collision between %s and %s")
                              % (f, foldmap[fold]))
        foldmap[fold] = f

    # check case-folding of directories
    foldprefix = unfoldprefix = lastfull = ''
    for fold, f in sorted(foldmap.items()):
        if fold.startswith(foldprefix) and not f.startswith(unfoldprefix):
            # the folded prefix matches but actual casing is different
            raise error.Abort(_("case-folding collision between "
                                "%s and directory of %s") % (lastfull, f))
        foldprefix = fold + '/'
        unfoldprefix = f + '/'
        lastfull = f

def driverpreprocess(repo, ms, wctx, labels=None):
    """run the preprocess step of the merge driver, if any

    This is currently not implemented -- it's an extension point."""
    return True

def driverconclude(repo, ms, wctx, labels=None):
    """run the conclude step of the merge driver, if any

    This is currently not implemented -- it's an extension point."""
    return True

def _filesindirs(repo, manifest, dirs):
    """
    Generator that yields pairs of all the files in the manifest that are found
    inside the directories listed in dirs, and which directory they are found
    in.
    """
    for f in manifest:
        for p in util.finddirs(f):
            if p in dirs:
                yield f, p
                break

def checkpathconflicts(repo, wctx, mctx, actions):
    """
    Check if any actions introduce path conflicts in the repository, updating
    actions to record or handle the path conflict accordingly.
    """
    mf = wctx.manifest()

    # The set of local files that conflict with a remote directory.
    localconflicts = set()

    # The set of directories that conflict with a remote file, and so may cause
    # conflicts if they still contain any files after the merge.
    remoteconflicts = set()

    # The set of directories that appear as both a file and a directory in the
    # remote manifest. These indicate an invalid remote manifest, which
    # can't be updated to cleanly.
    invalidconflicts = set()

    # The set of directories that contain files that are being created.
    createdfiledirs = set()

    # The set of files deleted by all the actions.
    deletedfiles = set()

    for f, (m, args, msg) in actions.items():
        if m in (ACTION_CREATED, ACTION_DELETED_CHANGED, ACTION_MERGE,
                 ACTION_CREATED_MERGE):
            # This action may create a new local file.
            createdfiledirs.update(util.finddirs(f))
            if mf.hasdir(f):
                # The file aliases a local directory. This might be ok if all
                # the files in the local directory are being deleted. This
                # will be checked once we know what all the deleted files are.
                remoteconflicts.add(f)
        # Track the names of all deleted files.
        if m == ACTION_REMOVE:
            deletedfiles.add(f)
        if m == ACTION_MERGE:
            f1, f2, fa, move, anc = args
            if move:
                deletedfiles.add(f1)
        if m == ACTION_DIR_RENAME_MOVE_LOCAL:
            f2, flags = args
            deletedfiles.add(f2)

    # Check all directories that contain created files for path conflicts.
    for p in createdfiledirs:
        if p in mf:
            if p in mctx:
                # A file is in a directory which aliases both a local
                # and a remote file. This is an internal inconsistency
                # within the remote manifest.
                invalidconflicts.add(p)
            else:
                # A file is in a directory which aliases a local file.
                # We will need to rename the local file.
                localconflicts.add(p)
        if p in actions and actions[p][0] in (ACTION_CREATED,
                                              ACTION_DELETED_CHANGED,
                                              ACTION_MERGE,
                                              ACTION_CREATED_MERGE):
            # The file is in a directory which aliases a remote file.
            # This is an internal inconsistency within the remote
            # manifest.
            invalidconflicts.add(p)

    # Rename all local conflicting files that have not been deleted.
    for p in localconflicts:
        if p not in deletedfiles:
            ctxname = bytes(wctx).rstrip('+')
            pnew = util.safename(p, ctxname, wctx, set(actions.keys()))
            actions[pnew] = (ACTION_PATH_CONFLICT_RESOLVE, (p,),
                             'local path conflict')
            actions[p] = (ACTION_PATH_CONFLICT, (pnew, 'l'),
                          'path conflict')

    if remoteconflicts:
        # Check if all files in the conflicting directories have been removed.
        ctxname = bytes(mctx).rstrip('+')
        for f, p in _filesindirs(repo, mf, remoteconflicts):
            if f not in deletedfiles:
                m, args, msg = actions[p]
                pnew = util.safename(p, ctxname, wctx, set(actions.keys()))
                if m in (ACTION_DELETED_CHANGED, ACTION_MERGE):
                    # Action was merge, just update target.
                    actions[pnew] = (m, args, msg)
                else:
                    # Action was create, change to renamed get action.
                    fl = args[0]
                    actions[pnew] = (ACTION_LOCAL_DIR_RENAME_GET, (p, fl),
                                     'remote path conflict')
                actions[p] = (ACTION_PATH_CONFLICT, (pnew, ACTION_REMOVE),
                              'path conflict')
                remoteconflicts.remove(p)
                break

    if invalidconflicts:
        for p in invalidconflicts:
            repo.ui.warn(_("%s: is both a file and a directory\n") % p)
        raise error.Abort(_("destination manifest contains path conflicts"))

def _filternarrowactions(narrowmatch, branchmerge, actions):
    """
    Filters out actions that can be ignored because the repo is narrowed.

    Raise an exception if the merge cannot be completed because the repo is
    narrowed.
    """
    nooptypes = set(['k']) # TODO: handle with nonconflicttypes
    nonconflicttypes = set('a am c cm f g r e'.split())
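    # These single-letter codes are the short spellings of the ACTION_*
    # constants ('k' = keep, 'a'/'am' = add, 'c'/'cm' = create, 'f' = forget,
    # 'g' = get, 'r' = remove, 'e' = exec-flag change).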
    # We mutate the items in the dict during iteration, so iterate
    # over a copy.
    for f, action in list(actions.items()):
        if narrowmatch(f):
            pass
        elif not branchmerge:
            del actions[f] # just updating, ignore changes outside clone
        elif action[0] in nooptypes:
            del actions[f] # merge does not affect file
        elif action[0] in nonconflicttypes:
            raise error.Abort(_('merge affects file \'%s\' outside narrow, '
                                'which is not yet supported') % f,
                              hint=_('merging in the other direction '
                                     'may work'))
        else:
            raise error.Abort(_('conflict in file \'%s\' is outside '
                                'narrow clone') % f)

def manifestmerge(repo, wctx, p2, pa, branchmerge, force, matcher,
                  acceptremote, followcopies, forcefulldiff=False):
    """
    Merge wctx and p2 with ancestor pa and generate merge action list

    branchmerge and force are as passed in to update
    matcher = matcher to filter file lists
    acceptremote = accept the incoming changes without prompting
    """
    if matcher is not None and matcher.always():
        matcher = None

    copy, movewithdir, diverge, renamedelete, dirmove = {}, {}, {}, {}, {}

    # manifests fetched in order are going to be faster, so prime the caches
    [x.manifest() for x in
     sorted(wctx.parents() + [p2, pa], key=scmutil.intrev)]

    if followcopies:
        ret = copies.mergecopies(repo, wctx, p2, pa)
        copy, movewithdir, diverge, renamedelete, dirmove = ret

    boolbm = pycompat.bytestr(bool(branchmerge))
    boolf = pycompat.bytestr(bool(force))
    boolm = pycompat.bytestr(bool(matcher))
    repo.ui.note(_("resolving manifests\n"))
    repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n"
                  % (boolbm, boolf, boolm))
    repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))

    m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
    copied = set(copy.values())
    copied.update(movewithdir.values())

    if '.hgsubstate' in m1 and wctx.rev() is None:
        # Check whether sub state is modified, and overwrite the manifest
        # to flag the change. If wctx is a committed revision, we shouldn't
        # care for the dirty state of the working directory.
        if any(wctx.sub(s).dirty() for s in wctx.substate):
            m1['.hgsubstate'] = modifiednodeid

    # Don't use m2-vs-ma optimization if:
    # - ma is the same as m1 or m2, which we're just going to diff again later
    # - The caller specifically asks for a full diff, which is useful during bid
    #   merge.
    if (pa not in ([wctx, p2] + wctx.parents()) and not forcefulldiff):
        # Identify which files are relevant to the merge, so we can limit the
        # total m1-vs-m2 diff to just those files. This has significant
        # performance benefits in large repositories.
        relevantfiles = set(ma.diff(m2).keys())

        # For copied and moved files, we need to add the source file too.
        for copykey, copyvalue in copy.iteritems():
            if copyvalue in relevantfiles:
                relevantfiles.add(copykey)
        for movedirkey in movewithdir:
            relevantfiles.add(movedirkey)
        filesmatcher = scmutil.matchfiles(repo, relevantfiles)
        matcher = matchmod.intersectmatchers(matcher, filesmatcher)

    diff = m1.diff(m2, match=matcher)

    if matcher is None:
        matcher = matchmod.always('', '')

    actions = {}
    for f, ((n1, fl1), (n2, fl2)) in diff.iteritems():
        if n1 and n2: # file exists on both local and remote side
            if f not in ma:
                fa = copy.get(f, None)
                if fa is not None:
                    actions[f] = (ACTION_MERGE, (f, f, fa, False, pa.node()),
                                  'both renamed from %s' % fa)
                else:
                    actions[f] = (ACTION_MERGE, (f, f, None, False, pa.node()),
                                  'both created')
            else:
                a = ma[f]
                fla = ma.flags(f)
                nol = 'l' not in fl1 + fl2 + fla
                if n2 == a and fl2 == fla:
                    actions[f] = (ACTION_KEEP, (), 'remote unchanged')
                elif n1 == a and fl1 == fla: # local unchanged - use remote
                    if n1 == n2: # optimization: keep local content
                        actions[f] = (ACTION_EXEC, (fl2,), 'update permissions')
                    else:
                        actions[f] = (ACTION_GET, (fl2, False),
                                      'remote is newer')
                elif nol and n2 == a: # remote only changed 'x'
                    actions[f] = (ACTION_EXEC, (fl2,), 'update permissions')
                elif nol and n1 == a: # local only changed 'x'
                    actions[f] = (ACTION_GET, (fl1, False), 'remote is newer')
                else: # both changed something
                    actions[f] = (ACTION_MERGE, (f, f, f, False, pa.node()),
                                  'versions differ')
        elif n1: # file exists only on local side
            if f in copied:
                pass # we'll deal with it on m2 side
            elif f in movewithdir: # directory rename, move local
                f2 = movewithdir[f]
                if f2 in m2:
                    actions[f2] = (ACTION_MERGE, (f, f2, None, True, pa.node()),
                                   'remote directory rename, both created')
                else:
                    actions[f2] = (ACTION_DIR_RENAME_MOVE_LOCAL, (f, fl1),
                                   'remote directory rename - move from %s' % f)
            elif f in copy:
                f2 = copy[f]
                actions[f] = (ACTION_MERGE, (f, f2, f2, False, pa.node()),
                              'local copied/moved from %s' % f2)
            elif f in ma: # clean, a different, no remote
                if n1 != ma[f]:
                    if acceptremote:
                        actions[f] = (ACTION_REMOVE, None, 'remote delete')
                    else:
                        actions[f] = (ACTION_CHANGED_DELETED,
                                      (f, None, f, False, pa.node()),
                                      'prompt changed/deleted')
                elif n1 == addednodeid:
                    # This extra 'a' is added by working copy manifest to mark
                    # the file as locally added. We should forget it instead of
                    # deleting it.
                    actions[f] = (ACTION_FORGET, None, 'remote deleted')
                else:
                    actions[f] = (ACTION_REMOVE, None, 'other deleted')
        elif n2: # file exists only on remote side
            if f in copied:
                pass # we'll deal with it on m1 side
            elif f in movewithdir:
                f2 = movewithdir[f]
                if f2 in m1:
                    actions[f2] = (ACTION_MERGE,
                                   (f2, f, None, False, pa.node()),
                                   'local directory rename, both created')
                else:
                    actions[f2] = (ACTION_LOCAL_DIR_RENAME_GET, (f, fl2),
                                   'local directory rename - get from %s' % f)
            elif f in copy:
                f2 = copy[f]
                if f2 in m2:
                    actions[f] = (ACTION_MERGE, (f2, f, f2, False, pa.node()),
                                  'remote copied from %s' % f2)
                else:
                    actions[f] = (ACTION_MERGE, (f2, f, f2, True, pa.node()),
                                  'remote moved from %s' % f2)
            elif f not in ma:
                # local unknown, remote created: the logic is described by the
                # following table:
                #
                # force  branchmerge  different  |  action
                #   n         *           *      |  create
                #   y         n           *      |  create
                #   y         y           n      |  create
                #   y         y           y      |  merge
                #
                # Checking whether the files are different is expensive, so we
                # don't do that when we can avoid it.
                if not force:
                    actions[f] = (ACTION_CREATED, (fl2,), 'remote created')
                elif not branchmerge:
                    actions[f] = (ACTION_CREATED, (fl2,), 'remote created')
                else:
                    actions[f] = (ACTION_CREATED_MERGE, (fl2, pa.node()),
                                  'remote created, get or merge')
            elif n2 != ma[f]:
                df = None
                for d in dirmove:
                    if f.startswith(d):
                        # new file added in a directory that was moved
                        df = dirmove[d] + f[len(d):]
                        break
                if df is not None and df in m1:
                    actions[df] = (ACTION_MERGE, (df, f, f, False, pa.node()),
                                   'local directory rename - respect move '
                                   'from %s' % f)
                elif acceptremote:
                    actions[f] = (ACTION_CREATED, (fl2,), 'remote recreating')
                else:
                    actions[f] = (ACTION_DELETED_CHANGED,
                                  (None, f, f, False, pa.node()),
                                  'prompt deleted/changed')

    if repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
        # If we are merging, look for path conflicts.
        checkpathconflicts(repo, wctx, p2, actions)

    narrowmatch = repo.narrowmatch()
    if not narrowmatch.always():
        # Updates "actions" in place
        _filternarrowactions(narrowmatch, branchmerge, actions)

    return actions, diverge, renamedelete

def _resolvetrivial(repo, wctx, mctx, ancestor, actions):
    """Resolves false conflicts where the nodeid changed but the content
    remained the same."""
    # We force a copy of actions.items() because we're going to mutate
    # actions as we resolve trivial conflicts.
    for f, (m, args, msg) in list(actions.items()):
        if (m == ACTION_CHANGED_DELETED and f in ancestor
                and not wctx[f].cmp(ancestor[f])):
            # local did change but ended up with same content
            actions[f] = ACTION_REMOVE, None, 'prompt same'
        elif (m == ACTION_DELETED_CHANGED and f in ancestor
                and not mctx[f].cmp(ancestor[f])):
            # remote did change but ended up with same content
            del actions[f] # don't get = keep local deleted

def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force,
                     acceptremote, followcopies, matcher=None,
                     mergeforce=False):
    """Calculate the actions needed to merge mctx into wctx using ancestors"""
    # Avoid cycle.
    from . import sparse

    if len(ancestors) == 1: # default
        actions, diverge, renamedelete = manifestmerge(
            repo, wctx, mctx, ancestors[0], branchmerge, force, matcher,
            acceptremote, followcopies)
        _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce)

    else: # only when merge.preferancestor=* - the default
        repo.ui.note(
            _("note: merging %s and %s using bids from ancestors %s\n") %
            (wctx, mctx, _(' and ').join(pycompat.bytestr(anc)
                                         for anc in ancestors)))

        # Call for bids
        fbids = {} # mapping filename to bids (action method to list of actions)
        diverge, renamedelete = None, None
        for ancestor in ancestors:
            repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
            actions, diverge1, renamedelete1 = manifestmerge(
                repo, wctx, mctx, ancestor, branchmerge, force, matcher,
                acceptremote, followcopies, forcefulldiff=True)
            _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce)

            # Track the shortest set of warnings on the theory that bid
            # merge will correctly incorporate more information
            if diverge is None or len(diverge1) < len(diverge):
                diverge = diverge1
            if renamedelete is None or len(renamedelete) < len(renamedelete1):
                renamedelete = renamedelete1

            for f, a in sorted(actions.iteritems()):
                m, args, msg = a
                repo.ui.debug(' %s: %s -> %s\n' % (f, msg, m))
                if f in fbids:
                    d = fbids[f]
                    if m in d:
                        d[m].append(a)
                    else:
                        d[m] = [a]
                else:
                    fbids[f] = {m: [a]}

        # Pick the best bid for each file
        repo.ui.note(_('\nauction for merging merge bids\n'))
        actions = {}
        dms = [] # filenames that have dm actions
        for f, bids in sorted(fbids.items()):
            # bids is a mapping from action method to list of actions
            # Consensus?
            if len(bids) == 1: # all bids are the same kind of method
                m, l = list(bids.items())[0]
                if all(a == l[0] for a in l[1:]): # len(bids) is > 1
                    repo.ui.note(_(" %s: consensus for %s\n") % (f, m))
                    actions[f] = l[0]
                    if m == ACTION_DIR_RENAME_MOVE_LOCAL:
                        dms.append(f)
                    continue
            # If keep is an option, just do it.
            if ACTION_KEEP in bids:
                repo.ui.note(_(" %s: picking 'keep' action\n") % f)
                actions[f] = bids[ACTION_KEEP][0]
                continue
            # If there are gets and they all agree [how could they not?], do it.
            if ACTION_GET in bids:
                ga0 = bids[ACTION_GET][0]
                if all(a == ga0 for a in bids[ACTION_GET][1:]):
                    repo.ui.note(_(" %s: picking 'get' action\n") % f)
                    actions[f] = ga0
                    continue
            # TODO: Consider other simple actions such as mode changes
            # Handle inefficient democrazy.
            repo.ui.note(_(' %s: multiple bids for merge action:\n') % f)
            for m, l in sorted(bids.items()):
                for _f, args, msg in l:
                    repo.ui.note(' %s -> %s\n' % (msg, m))
            # Pick random action. TODO: Instead, prompt user when resolving
            m, l = list(bids.items())[0]
            repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') %
                         (f, m))
            actions[f] = l[0]
            if m == ACTION_DIR_RENAME_MOVE_LOCAL:
                dms.append(f)
            continue
        # Work around 'dm' that can cause multiple actions for the same file
        for f in dms:
            dm, (f0, flags), msg = actions[f]
            assert dm == ACTION_DIR_RENAME_MOVE_LOCAL, dm
            if f0 in actions and actions[f0][0] == ACTION_REMOVE:
                # We have one bid for removing a file and another for moving it.
                # These two could be merged as first move and then delete ...
                # but instead drop moving and just delete.
                del actions[f]
        repo.ui.note(_('end of auction\n\n'))

    _resolvetrivial(repo, wctx, mctx, ancestors[0], actions)

    if wctx.rev() is None:
        fractions = _forgetremoved(wctx, mctx, branchmerge)
        actions.update(fractions)

    prunedactions = sparse.filterupdatesactions(repo, wctx, mctx, branchmerge,
                                                actions)

    return prunedactions, diverge, renamedelete

def _getcwd():
    try:
        return pycompat.getcwd()
    except OSError as err:
        if err.errno == errno.ENOENT:
            return None
        raise

def batchremove(repo, wctx, actions):
    """apply removes to the working directory

    yields tuples for progress updates
    """
1451 verbose = repo.ui.verbose
1451 verbose = repo.ui.verbose
1452 cwd = _getcwd()
1452 cwd = _getcwd()
1453 i = 0
1453 i = 0
1454 for f, args, msg in actions:
1454 for f, args, msg in actions:
1455 repo.ui.debug(" %s: %s -> r\n" % (f, msg))
1455 repo.ui.debug(" %s: %s -> r\n" % (f, msg))
1456 if verbose:
1456 if verbose:
1457 repo.ui.note(_("removing %s\n") % f)
1457 repo.ui.note(_("removing %s\n") % f)
1458 wctx[f].audit()
1458 wctx[f].audit()
1459 try:
1459 try:
1460 wctx[f].remove(ignoremissing=True)
1460 wctx[f].remove(ignoremissing=True)
1461 except OSError as inst:
1461 except OSError as inst:
1462 repo.ui.warn(_("update failed to remove %s: %s!\n") %
1462 repo.ui.warn(_("update failed to remove %s: %s!\n") %
1463 (f, inst.strerror))
1463 (f, inst.strerror))
1464 if i == 100:
1464 if i == 100:
1465 yield i, f
1465 yield i, f
1466 i = 0
1466 i = 0
1467 i += 1
1467 i += 1
1468 if i > 0:
1468 if i > 0:
1469 yield i, f
1469 yield i, f
1470
1470
1471 if cwd and not _getcwd():
1471 if cwd and not _getcwd():
1472 # cwd was removed in the course of removing files; print a helpful
1472 # cwd was removed in the course of removing files; print a helpful
1473 # warning.
1473 # warning.
1474 repo.ui.warn(_("current directory was removed\n"
1474 repo.ui.warn(_("current directory was removed\n"
1475 "(consider changing to repo root: %s)\n") % repo.root)
1475 "(consider changing to repo root: %s)\n") % repo.root)
1476
1476
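batchremove above (and batchget below) report progress in batches of roughly 100 files rather than once per file, which keeps progress reporting cheap. A self-contained sketch of that yield pattern, assuming a plain list of paths instead of merge actions:

def batched_progress(items, batchsize=100):
    """Yield (count, last_item) roughly every batchsize items processed."""
    i = 0
    item = None
    for item in items:
        # ... per-item work would happen here ...
        if i == batchsize:
            yield i, item
            i = 0
        i += 1
    if i > 0:
        yield i, item

total = 0
for count, item in batched_progress(['file%d' % n for n in range(250)]):
    total += count
print('processed %d files in total' % total)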
1477 def batchget(repo, mctx, wctx, actions):
1477 def batchget(repo, mctx, wctx, actions):
1478 """apply gets to the working directory
1478 """apply gets to the working directory
1479
1479
1480 mctx is the context to get from
1480 mctx is the context to get from
1481
1481
1482 yields tuples for progress updates
1482 yields tuples for progress updates
1483 """
1483 """
1484 verbose = repo.ui.verbose
1484 verbose = repo.ui.verbose
1485 fctx = mctx.filectx
1485 fctx = mctx.filectx
1486 ui = repo.ui
1486 ui = repo.ui
1487 i = 0
1487 i = 0
1488 with repo.wvfs.backgroundclosing(ui, expectedcount=len(actions)):
1488 with repo.wvfs.backgroundclosing(ui, expectedcount=len(actions)):
1489 for f, (flags, backup), msg in actions:
1489 for f, (flags, backup), msg in actions:
1490 repo.ui.debug(" %s: %s -> g\n" % (f, msg))
1490 repo.ui.debug(" %s: %s -> g\n" % (f, msg))
1491 if verbose:
1491 if verbose:
1492 repo.ui.note(_("getting %s\n") % f)
1492 repo.ui.note(_("getting %s\n") % f)
1493
1493
1494 if backup:
1494 if backup:
1495 # If a file or directory exists with the same name, back that
1495 # If a file or directory exists with the same name, back that
1496 # up. Otherwise, look to see if there is a file that conflicts
1496 # up. Otherwise, look to see if there is a file that conflicts
1497 # with a directory this file is in, and if so, back that up.
1497 # with a directory this file is in, and if so, back that up.
1498 absf = repo.wjoin(f)
1498 absf = repo.wjoin(f)
1499 if not repo.wvfs.lexists(f):
1499 if not repo.wvfs.lexists(f):
1500 for p in util.finddirs(f):
1500 for p in util.finddirs(f):
1501 if repo.wvfs.isfileorlink(p):
1501 if repo.wvfs.isfileorlink(p):
1502 absf = repo.wjoin(p)
1502 absf = repo.wjoin(p)
1503 break
1503 break
1504 orig = scmutil.origpath(ui, repo, absf)
1504 orig = scmutil.origpath(ui, repo, absf)
1505 if repo.wvfs.lexists(absf):
1505 if repo.wvfs.lexists(absf):
1506 util.rename(absf, orig)
1506 util.rename(absf, orig)
1507 wctx[f].clearunknown()
1507 wctx[f].clearunknown()
1508 atomictemp = ui.configbool("experimental", "update.atomic-file")
1508 atomictemp = ui.configbool("experimental", "update.atomic-file")
1509 wctx[f].write(fctx(f).data(), flags, backgroundclose=True,
1509 wctx[f].write(fctx(f).data(), flags, backgroundclose=True,
1510 atomictemp=atomictemp)
1510 atomictemp=atomictemp)
1511 if i == 100:
1511 if i == 100:
1512 yield i, f
1512 yield i, f
1513 i = 0
1513 i = 0
1514 i += 1
1514 i += 1
1515 if i > 0:
1515 if i > 0:
1516 yield i, f
1516 yield i, f
1517
1517
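When the experimental update.atomic-file option is set (section [experimental], key update.atomic-file in hgrc), batchget asks the working-copy vfs to write each file through an atomic temporary file. The vfs handles the details; purely as an illustration of the idea, and not of the vfs API, an atomic write can be sketched with the standard library:

import os
import tempfile

def atomicwrite(path, data):
    """Write data to path via a temporary file plus a rename."""
    dirname = os.path.dirname(path) or '.'
    fd, tmppath = tempfile.mkstemp(dir=dirname, prefix='.tmp-')
    try:
        with os.fdopen(fd, 'wb') as fp:
            fp.write(data)
        os.rename(tmppath, path)  # atomic replacement on POSIX filesystems
    except Exception:
        os.unlink(tmppath)
        raise

atomicwrite('example.txt', b'new contents\n')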
1518 def _prefetchfiles(repo, ctx, actions):
1518 def _prefetchfiles(repo, ctx, actions):
1519 """Invoke ``scmutil.prefetchfiles()`` for the files relevant to the dict
1519 """Invoke ``scmutil.prefetchfiles()`` for the files relevant to the dict
1520 of merge actions. ``ctx`` is the context being merged in."""
1520 of merge actions. ``ctx`` is the context being merged in."""
1521
1521
1522 # Skipping 'a', 'am', 'f', 'r', 'dm', 'e', 'k', 'p' and 'pr', because they
1522 # Skipping 'a', 'am', 'f', 'r', 'dm', 'e', 'k', 'p' and 'pr', because they
1523 # don't touch the context to be merged in. 'cd' is skipped, because
1523 # don't touch the context to be merged in. 'cd' is skipped, because
1524 # changed/deleted never resolves to something from the remote side.
1524 # changed/deleted never resolves to something from the remote side.
1525 oplist = [actions[a] for a in (ACTION_GET, ACTION_DELETED_CHANGED,
1525 oplist = [actions[a] for a in (ACTION_GET, ACTION_DELETED_CHANGED,
1526 ACTION_LOCAL_DIR_RENAME_GET, ACTION_MERGE)]
1526 ACTION_LOCAL_DIR_RENAME_GET, ACTION_MERGE)]
1527 prefetch = scmutil.prefetchfiles
1527 prefetch = scmutil.prefetchfiles
1528 matchfiles = scmutil.matchfiles
1528 matchfiles = scmutil.matchfiles
1529 prefetch(repo, [ctx.rev()],
1529 prefetch(repo, [ctx.rev()],
1530 matchfiles(repo,
1530 matchfiles(repo,
1531 [f for sublist in oplist for f, args, msg in sublist]))
1531 [f for sublist in oplist for f, args, msg in sublist]))
1532
1532
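_prefetchfiles gathers the file names out of several action lists with a nested comprehension before handing them to scmutil.prefetchfiles. A tiny sketch of that flattening step, with made-up action tuples:

oplist = [
    [('a.txt', ('', False), 'remote created')],
    [],
    [('b.txt', ('x', False), 'remote is newer'), ('c.txt', ('', False), 'prompt recreating')],
]
files = [f for sublist in oplist for f, args, msg in sublist]
assert files == ['a.txt', 'b.txt', 'c.txt']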
1533 @attr.s(frozen=True)
1533 @attr.s(frozen=True)
1534 class updateresult(object):
1534 class updateresult(object):
1535 updatedcount = attr.ib()
1535 updatedcount = attr.ib()
1536 mergedcount = attr.ib()
1536 mergedcount = attr.ib()
1537 removedcount = attr.ib()
1537 removedcount = attr.ib()
1538 unresolvedcount = attr.ib()
1538 unresolvedcount = attr.ib()
1539
1539
1540 def isempty(self):
1540 def isempty(self):
1541 return (not self.updatedcount and not self.mergedcount
1541 return (not self.updatedcount and not self.mergedcount
1542 and not self.removedcount and not self.unresolvedcount)
1542 and not self.removedcount and not self.unresolvedcount)
1543
1543
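updateresult is a frozen attrs class, so its counts cannot be reassigned after construction. A minimal usage sketch (the argument values here are invented):

res = updateresult(updatedcount=3, mergedcount=1, removedcount=0, unresolvedcount=0)
assert not res.isempty()
assert res.unresolvedcount == 0
# res.updatedcount = 5  # would raise attr.exceptions.FrozenInstanceError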
1544 def applyupdates(repo, actions, wctx, mctx, overwrite, labels=None):
1544 def applyupdates(repo, actions, wctx, mctx, overwrite, labels=None):
1545 """apply the merge action list to the working directory
1545 """apply the merge action list to the working directory
1546
1546
1547 wctx is the working copy context
1547 wctx is the working copy context
1548 mctx is the context to be merged into the working copy
1548 mctx is the context to be merged into the working copy
1549
1549
1550 Return an updateresult object with counts (updated, merged, removed,
1550 Return an updateresult object with counts (updated, merged, removed,
1551 unresolved) describing how many files were affected by the update.
1551 unresolved) describing how many files were affected by the update.
1552 """
1552 """
1553
1553
1554 _prefetchfiles(repo, mctx, actions)
1554 _prefetchfiles(repo, mctx, actions)
1555
1555
1556 updated, merged, removed = 0, 0, 0
1556 updated, merged, removed = 0, 0, 0
1557 ms = mergestate.clean(repo, wctx.p1().node(), mctx.node(), labels)
1557 ms = mergestate.clean(repo, wctx.p1().node(), mctx.node(), labels)
1558 moves = []
1558 moves = []
1559 for m, l in actions.items():
1559 for m, l in actions.items():
1560 l.sort()
1560 l.sort()
1561
1561
1562 # 'cd' and 'dc' actions are treated like other merge conflicts
1562 # 'cd' and 'dc' actions are treated like other merge conflicts
1563 mergeactions = sorted(actions[ACTION_CHANGED_DELETED])
1563 mergeactions = sorted(actions[ACTION_CHANGED_DELETED])
1564 mergeactions.extend(sorted(actions[ACTION_DELETED_CHANGED]))
1564 mergeactions.extend(sorted(actions[ACTION_DELETED_CHANGED]))
1565 mergeactions.extend(actions[ACTION_MERGE])
1565 mergeactions.extend(actions[ACTION_MERGE])
1566 for f, args, msg in mergeactions:
1566 for f, args, msg in mergeactions:
1567 f1, f2, fa, move, anc = args
1567 f1, f2, fa, move, anc = args
1568 if f == '.hgsubstate': # merged internally
1568 if f == '.hgsubstate': # merged internally
1569 continue
1569 continue
1570 if f1 is None:
1570 if f1 is None:
1571 fcl = filemerge.absentfilectx(wctx, fa)
1571 fcl = filemerge.absentfilectx(wctx, fa)
1572 else:
1572 else:
1573 repo.ui.debug(" preserving %s for resolve of %s\n" % (f1, f))
1573 repo.ui.debug(" preserving %s for resolve of %s\n" % (f1, f))
1574 fcl = wctx[f1]
1574 fcl = wctx[f1]
1575 if f2 is None:
1575 if f2 is None:
1576 fco = filemerge.absentfilectx(mctx, fa)
1576 fco = filemerge.absentfilectx(mctx, fa)
1577 else:
1577 else:
1578 fco = mctx[f2]
1578 fco = mctx[f2]
1579 actx = repo[anc]
1579 actx = repo[anc]
1580 if fa in actx:
1580 if fa in actx:
1581 fca = actx[fa]
1581 fca = actx[fa]
1582 else:
1582 else:
1583 # TODO: move to absentfilectx
1583 # TODO: move to absentfilectx
1584 fca = repo.filectx(f1, fileid=nullrev)
1584 fca = repo.filectx(f1, fileid=nullrev)
1585 ms.add(fcl, fco, fca, f)
1585 ms.add(fcl, fco, fca, f)
1586 if f1 != f and move:
1586 if f1 != f and move:
1587 moves.append(f1)
1587 moves.append(f1)
1588
1588
1589 # remove renamed files after they have been safely stored
1589 # remove renamed files after they have been safely stored
1590 for f in moves:
1590 for f in moves:
1591 if wctx[f].lexists():
1591 if wctx[f].lexists():
1592 repo.ui.debug("removing %s\n" % f)
1592 repo.ui.debug("removing %s\n" % f)
1593 wctx[f].audit()
1593 wctx[f].audit()
1594 wctx[f].remove()
1594 wctx[f].remove()
1595
1595
1596 numupdates = sum(len(l) for m, l in actions.items()
1596 numupdates = sum(len(l) for m, l in actions.items()
1597 if m != ACTION_KEEP)
1597 if m != ACTION_KEEP)
1598 progress = repo.ui.makeprogress(_('updating'), unit=_('files'),
1598 progress = repo.ui.makeprogress(_('updating'), unit=_('files'),
1599 total=numupdates)
1599 total=numupdates)
1600
1600
1601 if [a for a in actions[ACTION_REMOVE] if a[0] == '.hgsubstate']:
1601 if [a for a in actions[ACTION_REMOVE] if a[0] == '.hgsubstate']:
1602 subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1602 subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1603
1603
1604 # record path conflicts
1604 # record path conflicts
1605 for f, args, msg in actions[ACTION_PATH_CONFLICT]:
1605 for f, args, msg in actions[ACTION_PATH_CONFLICT]:
1606 f1, fo = args
1606 f1, fo = args
1607 s = repo.ui.status
1607 s = repo.ui.status
1608 s(_("%s: path conflict - a file or link has the same name as a "
1608 s(_("%s: path conflict - a file or link has the same name as a "
1609 "directory\n") % f)
1609 "directory\n") % f)
1610 if fo == 'l':
1610 if fo == 'l':
1611 s(_("the local file has been renamed to %s\n") % f1)
1611 s(_("the local file has been renamed to %s\n") % f1)
1612 else:
1612 else:
1613 s(_("the remote file has been renamed to %s\n") % f1)
1613 s(_("the remote file has been renamed to %s\n") % f1)
1614 s(_("resolve manually then use 'hg resolve --mark %s'\n") % f)
1614 s(_("resolve manually then use 'hg resolve --mark %s'\n") % f)
1615 ms.addpath(f, f1, fo)
1615 ms.addpath(f, f1, fo)
1616 progress.increment(item=f)
1616 progress.increment(item=f)
1617
1617
1618 # When merging in-memory, we can't support worker processes, so set the
1618 # When merging in-memory, we can't support worker processes, so set the
1619 # per-item cost at 0 in that case.
1619 # per-item cost at 0 in that case.
1620 cost = 0 if wctx.isinmemory() else 0.001
1620 cost = 0 if wctx.isinmemory() else 0.001
1621
1621
1622 # remove in parallel (must come before resolving path conflicts and getting)
1622 # remove in parallel (must come before resolving path conflicts and getting)
1623 prog = worker.worker(repo.ui, cost, batchremove, (repo, wctx),
1623 prog = worker.worker(repo.ui, cost, batchremove, (repo, wctx),
1624 actions[ACTION_REMOVE])
1624 actions[ACTION_REMOVE])
1625 for i, item in prog:
1625 for i, item in prog:
1626 progress.increment(step=i, item=item)
1626 progress.increment(step=i, item=item)
1627 removed = len(actions[ACTION_REMOVE])
1627 removed = len(actions[ACTION_REMOVE])
1628
1628
1629 # resolve path conflicts (must come before getting)
1629 # resolve path conflicts (must come before getting)
1630 for f, args, msg in actions[ACTION_PATH_CONFLICT_RESOLVE]:
1630 for f, args, msg in actions[ACTION_PATH_CONFLICT_RESOLVE]:
1631 repo.ui.debug(" %s: %s -> pr\n" % (f, msg))
1631 repo.ui.debug(" %s: %s -> pr\n" % (f, msg))
1632 f0, = args
1632 f0, = args
1633 if wctx[f0].lexists():
1633 if wctx[f0].lexists():
1634 repo.ui.note(_("moving %s to %s\n") % (f0, f))
1634 repo.ui.note(_("moving %s to %s\n") % (f0, f))
1635 wctx[f].audit()
1635 wctx[f].audit()
1636 wctx[f].write(wctx.filectx(f0).data(), wctx.filectx(f0).flags())
1636 wctx[f].write(wctx.filectx(f0).data(), wctx.filectx(f0).flags())
1637 wctx[f0].remove()
1637 wctx[f0].remove()
1638 progress.increment(item=f)
1638 progress.increment(item=f)
1639
1639
1640 # get in parallel
1641 prog = worker.worker(repo.ui, cost, batchget, (repo, mctx, wctx),
1642 actions[ACTION_GET])
1640 # get in parallel.
1641 threadsafe = repo.ui.configbool('experimental',
1642 'worker.wdir-get-thread-safe')
1643 prog = worker.worker(repo.ui, cost, batchget, (repo, mctx, wctx),
1644 actions[ACTION_GET],
1645 threadsafe=threadsafe)
1643 for i, item in prog:
1646 for i, item in prog:
1644 progress.increment(step=i, item=item)
1647 progress.increment(step=i, item=item)
1645 updated = len(actions[ACTION_GET])
1648 updated = len(actions[ACTION_GET])
1646
1649
1647 if [a for a in actions[ACTION_GET] if a[0] == '.hgsubstate']:
1650 if [a for a in actions[ACTION_GET] if a[0] == '.hgsubstate']:
1648 subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1651 subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1649
1652
1650 # forget (manifest only, just log it) (must come first)
1653 # forget (manifest only, just log it) (must come first)
1651 for f, args, msg in actions[ACTION_FORGET]:
1654 for f, args, msg in actions[ACTION_FORGET]:
1652 repo.ui.debug(" %s: %s -> f\n" % (f, msg))
1655 repo.ui.debug(" %s: %s -> f\n" % (f, msg))
1653 progress.increment(item=f)
1656 progress.increment(item=f)
1654
1657
1655 # re-add (manifest only, just log it)
1658 # re-add (manifest only, just log it)
1656 for f, args, msg in actions[ACTION_ADD]:
1659 for f, args, msg in actions[ACTION_ADD]:
1657 repo.ui.debug(" %s: %s -> a\n" % (f, msg))
1660 repo.ui.debug(" %s: %s -> a\n" % (f, msg))
1658 progress.increment(item=f)
1661 progress.increment(item=f)
1659
1662
1660 # re-add/mark as modified (manifest only, just log it)
1663 # re-add/mark as modified (manifest only, just log it)
1661 for f, args, msg in actions[ACTION_ADD_MODIFIED]:
1664 for f, args, msg in actions[ACTION_ADD_MODIFIED]:
1662 repo.ui.debug(" %s: %s -> am\n" % (f, msg))
1665 repo.ui.debug(" %s: %s -> am\n" % (f, msg))
1663 progress.increment(item=f)
1666 progress.increment(item=f)
1664
1667
1665 # keep (noop, just log it)
1668 # keep (noop, just log it)
1666 for f, args, msg in actions[ACTION_KEEP]:
1669 for f, args, msg in actions[ACTION_KEEP]:
1667 repo.ui.debug(" %s: %s -> k\n" % (f, msg))
1670 repo.ui.debug(" %s: %s -> k\n" % (f, msg))
1668 # no progress
1671 # no progress
1669
1672
1670 # directory rename, move local
1673 # directory rename, move local
1671 for f, args, msg in actions[ACTION_DIR_RENAME_MOVE_LOCAL]:
1674 for f, args, msg in actions[ACTION_DIR_RENAME_MOVE_LOCAL]:
1672 repo.ui.debug(" %s: %s -> dm\n" % (f, msg))
1675 repo.ui.debug(" %s: %s -> dm\n" % (f, msg))
1673 progress.increment(item=f)
1676 progress.increment(item=f)
1674 f0, flags = args
1677 f0, flags = args
1675 repo.ui.note(_("moving %s to %s\n") % (f0, f))
1678 repo.ui.note(_("moving %s to %s\n") % (f0, f))
1676 wctx[f].audit()
1679 wctx[f].audit()
1677 wctx[f].write(wctx.filectx(f0).data(), flags)
1680 wctx[f].write(wctx.filectx(f0).data(), flags)
1678 wctx[f0].remove()
1681 wctx[f0].remove()
1679 updated += 1
1682 updated += 1
1680
1683
1681 # local directory rename, get
1684 # local directory rename, get
1682 for f, args, msg in actions[ACTION_LOCAL_DIR_RENAME_GET]:
1685 for f, args, msg in actions[ACTION_LOCAL_DIR_RENAME_GET]:
1683 repo.ui.debug(" %s: %s -> dg\n" % (f, msg))
1686 repo.ui.debug(" %s: %s -> dg\n" % (f, msg))
1684 progress.increment(item=f)
1687 progress.increment(item=f)
1685 f0, flags = args
1688 f0, flags = args
1686 repo.ui.note(_("getting %s to %s\n") % (f0, f))
1689 repo.ui.note(_("getting %s to %s\n") % (f0, f))
1687 wctx[f].write(mctx.filectx(f0).data(), flags)
1690 wctx[f].write(mctx.filectx(f0).data(), flags)
1688 updated += 1
1691 updated += 1
1689
1692
1690 # exec
1693 # exec
1691 for f, args, msg in actions[ACTION_EXEC]:
1694 for f, args, msg in actions[ACTION_EXEC]:
1692 repo.ui.debug(" %s: %s -> e\n" % (f, msg))
1695 repo.ui.debug(" %s: %s -> e\n" % (f, msg))
1693 progress.increment(item=f)
1696 progress.increment(item=f)
1694 flags, = args
1697 flags, = args
1695 wctx[f].audit()
1698 wctx[f].audit()
1696 wctx[f].setflags('l' in flags, 'x' in flags)
1699 wctx[f].setflags('l' in flags, 'x' in flags)
1697 updated += 1
1700 updated += 1
1698
1701
1699 # the ordering is important here -- ms.mergedriver will raise if the merge
1702 # the ordering is important here -- ms.mergedriver will raise if the merge
1700 # driver has changed, and we want to be able to bypass it when overwrite is
1703 # driver has changed, and we want to be able to bypass it when overwrite is
1701 # True
1704 # True
1702 usemergedriver = not overwrite and mergeactions and ms.mergedriver
1705 usemergedriver = not overwrite and mergeactions and ms.mergedriver
1703
1706
1704 if usemergedriver:
1707 if usemergedriver:
1705 if wctx.isinmemory():
1708 if wctx.isinmemory():
1706 raise error.InMemoryMergeConflictsError("in-memory merge does not "
1709 raise error.InMemoryMergeConflictsError("in-memory merge does not "
1707 "support mergedriver")
1710 "support mergedriver")
1708 ms.commit()
1711 ms.commit()
1709 proceed = driverpreprocess(repo, ms, wctx, labels=labels)
1712 proceed = driverpreprocess(repo, ms, wctx, labels=labels)
1710 # the driver might leave some files unresolved
1713 # the driver might leave some files unresolved
1711 unresolvedf = set(ms.unresolved())
1714 unresolvedf = set(ms.unresolved())
1712 if not proceed:
1715 if not proceed:
1713 # XXX setting unresolved to at least 1 is a hack to make sure we
1716 # XXX setting unresolved to at least 1 is a hack to make sure we
1714 # error out
1717 # error out
1715 return updateresult(updated, merged, removed,
1718 return updateresult(updated, merged, removed,
1716 max(len(unresolvedf), 1))
1719 max(len(unresolvedf), 1))
1717 newactions = []
1720 newactions = []
1718 for f, args, msg in mergeactions:
1721 for f, args, msg in mergeactions:
1719 if f in unresolvedf:
1722 if f in unresolvedf:
1720 newactions.append((f, args, msg))
1723 newactions.append((f, args, msg))
1721 mergeactions = newactions
1724 mergeactions = newactions
1722
1725
1723 try:
1726 try:
1724 # premerge
1727 # premerge
1725 tocomplete = []
1728 tocomplete = []
1726 for f, args, msg in mergeactions:
1729 for f, args, msg in mergeactions:
1727 repo.ui.debug(" %s: %s -> m (premerge)\n" % (f, msg))
1730 repo.ui.debug(" %s: %s -> m (premerge)\n" % (f, msg))
1728 progress.increment(item=f)
1731 progress.increment(item=f)
1729 if f == '.hgsubstate': # subrepo states need updating
1732 if f == '.hgsubstate': # subrepo states need updating
1730 subrepoutil.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
1733 subrepoutil.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
1731 overwrite, labels)
1734 overwrite, labels)
1732 continue
1735 continue
1733 wctx[f].audit()
1736 wctx[f].audit()
1734 complete, r = ms.preresolve(f, wctx)
1737 complete, r = ms.preresolve(f, wctx)
1735 if not complete:
1738 if not complete:
1736 numupdates += 1
1739 numupdates += 1
1737 tocomplete.append((f, args, msg))
1740 tocomplete.append((f, args, msg))
1738
1741
1739 # merge
1742 # merge
1740 for f, args, msg in tocomplete:
1743 for f, args, msg in tocomplete:
1741 repo.ui.debug(" %s: %s -> m (merge)\n" % (f, msg))
1744 repo.ui.debug(" %s: %s -> m (merge)\n" % (f, msg))
1742 progress.increment(item=f, total=numupdates)
1745 progress.increment(item=f, total=numupdates)
1743 ms.resolve(f, wctx)
1746 ms.resolve(f, wctx)
1744
1747
1745 finally:
1748 finally:
1746 ms.commit()
1749 ms.commit()
1747
1750
1748 unresolved = ms.unresolvedcount()
1751 unresolved = ms.unresolvedcount()
1749
1752
1750 if (usemergedriver and not unresolved
1753 if (usemergedriver and not unresolved
1751 and ms.mdstate() != MERGE_DRIVER_STATE_SUCCESS):
1754 and ms.mdstate() != MERGE_DRIVER_STATE_SUCCESS):
1752 if not driverconclude(repo, ms, wctx, labels=labels):
1755 if not driverconclude(repo, ms, wctx, labels=labels):
1753 # XXX setting unresolved to at least 1 is a hack to make sure we
1756 # XXX setting unresolved to at least 1 is a hack to make sure we
1754 # error out
1757 # error out
1755 unresolved = max(unresolved, 1)
1758 unresolved = max(unresolved, 1)
1756
1759
1757 ms.commit()
1760 ms.commit()
1758
1761
1759 msupdated, msmerged, msremoved = ms.counts()
1762 msupdated, msmerged, msremoved = ms.counts()
1760 updated += msupdated
1763 updated += msupdated
1761 merged += msmerged
1764 merged += msmerged
1762 removed += msremoved
1765 removed += msremoved
1763
1766
1764 extraactions = ms.actions()
1767 extraactions = ms.actions()
1765 if extraactions:
1768 if extraactions:
1766 mfiles = set(a[0] for a in actions[ACTION_MERGE])
1769 mfiles = set(a[0] for a in actions[ACTION_MERGE])
1767 for k, acts in extraactions.iteritems():
1770 for k, acts in extraactions.iteritems():
1768 actions[k].extend(acts)
1771 actions[k].extend(acts)
1769 # Remove these files from actions[ACTION_MERGE] as well. This is
1772 # Remove these files from actions[ACTION_MERGE] as well. This is
1770 # important because in recordupdates, files in actions[ACTION_MERGE]
1773 # important because in recordupdates, files in actions[ACTION_MERGE]
1771 # are processed after files in other actions, and the merge driver
1774 # are processed after files in other actions, and the merge driver
1772 # might add files to those actions via extraactions above. This can
1775 # might add files to those actions via extraactions above. This can
1773 # lead to a file being recorded twice, with poor results. This is
1776 # lead to a file being recorded twice, with poor results. This is
1774 # especially problematic for actions[ACTION_REMOVE] (currently only
1777 # especially problematic for actions[ACTION_REMOVE] (currently only
1775 # possible with the merge driver in the initial merge process;
1778 # possible with the merge driver in the initial merge process;
1776 # interrupted merges don't go through this flow).
1779 # interrupted merges don't go through this flow).
1777 #
1780 #
1778 # The real fix here is to have indexes by both file and action so
1781 # The real fix here is to have indexes by both file and action so
1779 # that when the action for a file is changed it is automatically
1782 # that when the action for a file is changed it is automatically
1780 # reflected in the other action lists. But that involves a more
1783 # reflected in the other action lists. But that involves a more
1781 # complex data structure, so this will do for now.
1784 # complex data structure, so this will do for now.
1782 #
1785 #
1783 # We don't need to do the same operation for 'dc' and 'cd' because
1786 # We don't need to do the same operation for 'dc' and 'cd' because
1784 # those lists aren't consulted again.
1787 # those lists aren't consulted again.
1785 mfiles.difference_update(a[0] for a in acts)
1788 mfiles.difference_update(a[0] for a in acts)
1786
1789
1787 actions[ACTION_MERGE] = [a for a in actions[ACTION_MERGE]
1790 actions[ACTION_MERGE] = [a for a in actions[ACTION_MERGE]
1788 if a[0] in mfiles]
1791 if a[0] in mfiles]
1789
1792
1790 progress.complete()
1793 progress.complete()
1791 return updateresult(updated, merged, removed, unresolved)
1794 return updateresult(updated, merged, removed, unresolved)
1792
1795
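The worker.wdir-get-thread-safe knob used in applyupdates above exists because file gets are not thread safe by default (issue5933): the worker is only allowed to use thread-based parallelism for gets when the experimental option opts in. A rough standalone sketch of that kind of gating, using concurrent.futures rather than Mercurial's worker module:

from concurrent.futures import ThreadPoolExecutor

def runbatch(func, items, threadsafe=False, maxworkers=4):
    """Run func over items, threaded only when the caller says it is safe."""
    if not threadsafe:
        # not marked thread safe: stay on the calling thread
        return [func(item) for item in items]
    with ThreadPoolExecutor(max_workers=maxworkers) as pool:
        return list(pool.map(func, items))

# hypothetical usage: the work stays serial unless explicitly marked safe
results = runbatch(len, ['a', 'bb', 'ccc'], threadsafe=False)
assert results == [1, 2, 3]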
1793 def recordupdates(repo, actions, branchmerge):
1796 def recordupdates(repo, actions, branchmerge):
1794 "record merge actions to the dirstate"
1797 "record merge actions to the dirstate"
1795 # remove (must come first)
1798 # remove (must come first)
1796 for f, args, msg in actions.get(ACTION_REMOVE, []):
1799 for f, args, msg in actions.get(ACTION_REMOVE, []):
1797 if branchmerge:
1800 if branchmerge:
1798 repo.dirstate.remove(f)
1801 repo.dirstate.remove(f)
1799 else:
1802 else:
1800 repo.dirstate.drop(f)
1803 repo.dirstate.drop(f)
1801
1804
1802 # forget (must come first)
1805 # forget (must come first)
1803 for f, args, msg in actions.get(ACTION_FORGET, []):
1806 for f, args, msg in actions.get(ACTION_FORGET, []):
1804 repo.dirstate.drop(f)
1807 repo.dirstate.drop(f)
1805
1808
1806 # resolve path conflicts
1809 # resolve path conflicts
1807 for f, args, msg in actions.get(ACTION_PATH_CONFLICT_RESOLVE, []):
1810 for f, args, msg in actions.get(ACTION_PATH_CONFLICT_RESOLVE, []):
1808 f0, = args
1811 f0, = args
1809 origf0 = repo.dirstate.copied(f0) or f0
1812 origf0 = repo.dirstate.copied(f0) or f0
1810 repo.dirstate.add(f)
1813 repo.dirstate.add(f)
1811 repo.dirstate.copy(origf0, f)
1814 repo.dirstate.copy(origf0, f)
1812 if f0 == origf0:
1815 if f0 == origf0:
1813 repo.dirstate.remove(f0)
1816 repo.dirstate.remove(f0)
1814 else:
1817 else:
1815 repo.dirstate.drop(f0)
1818 repo.dirstate.drop(f0)
1816
1819
1817 # re-add
1820 # re-add
1818 for f, args, msg in actions.get(ACTION_ADD, []):
1821 for f, args, msg in actions.get(ACTION_ADD, []):
1819 repo.dirstate.add(f)
1822 repo.dirstate.add(f)
1820
1823
1821 # re-add/mark as modified
1824 # re-add/mark as modified
1822 for f, args, msg in actions.get(ACTION_ADD_MODIFIED, []):
1825 for f, args, msg in actions.get(ACTION_ADD_MODIFIED, []):
1823 if branchmerge:
1826 if branchmerge:
1824 repo.dirstate.normallookup(f)
1827 repo.dirstate.normallookup(f)
1825 else:
1828 else:
1826 repo.dirstate.add(f)
1829 repo.dirstate.add(f)
1827
1830
1828 # exec change
1831 # exec change
1829 for f, args, msg in actions.get(ACTION_EXEC, []):
1832 for f, args, msg in actions.get(ACTION_EXEC, []):
1830 repo.dirstate.normallookup(f)
1833 repo.dirstate.normallookup(f)
1831
1834
1832 # keep
1835 # keep
1833 for f, args, msg in actions.get(ACTION_KEEP, []):
1836 for f, args, msg in actions.get(ACTION_KEEP, []):
1834 pass
1837 pass
1835
1838
1836 # get
1839 # get
1837 for f, args, msg in actions.get(ACTION_GET, []):
1840 for f, args, msg in actions.get(ACTION_GET, []):
1838 if branchmerge:
1841 if branchmerge:
1839 repo.dirstate.otherparent(f)
1842 repo.dirstate.otherparent(f)
1840 else:
1843 else:
1841 repo.dirstate.normal(f)
1844 repo.dirstate.normal(f)
1842
1845
1843 # merge
1846 # merge
1844 for f, args, msg in actions.get(ACTION_MERGE, []):
1847 for f, args, msg in actions.get(ACTION_MERGE, []):
1845 f1, f2, fa, move, anc = args
1848 f1, f2, fa, move, anc = args
1846 if branchmerge:
1849 if branchmerge:
1847 # We've done a branch merge, mark this file as merged
1850 # We've done a branch merge, mark this file as merged
1848 # so that we properly record the merger later
1851 # so that we properly record the merger later
1849 repo.dirstate.merge(f)
1852 repo.dirstate.merge(f)
1850 if f1 != f2: # copy/rename
1853 if f1 != f2: # copy/rename
1851 if move:
1854 if move:
1852 repo.dirstate.remove(f1)
1855 repo.dirstate.remove(f1)
1853 if f1 != f:
1856 if f1 != f:
1854 repo.dirstate.copy(f1, f)
1857 repo.dirstate.copy(f1, f)
1855 else:
1858 else:
1856 repo.dirstate.copy(f2, f)
1859 repo.dirstate.copy(f2, f)
1857 else:
1860 else:
1858 # We've update-merged a locally modified file, so
1861 # We've update-merged a locally modified file, so
1859 # we set the dirstate to emulate a normal checkout
1862 # we set the dirstate to emulate a normal checkout
1860 # of that file some time in the past. Thus our
1863 # of that file some time in the past. Thus our
1861 # merge will appear as a normal local file
1864 # merge will appear as a normal local file
1862 # modification.
1865 # modification.
1863 if f2 == f: # file not locally copied/moved
1866 if f2 == f: # file not locally copied/moved
1864 repo.dirstate.normallookup(f)
1867 repo.dirstate.normallookup(f)
1865 if move:
1868 if move:
1866 repo.dirstate.drop(f1)
1869 repo.dirstate.drop(f1)
1867
1870
1868 # directory rename, move local
1871 # directory rename, move local
1869 for f, args, msg in actions.get(ACTION_DIR_RENAME_MOVE_LOCAL, []):
1872 for f, args, msg in actions.get(ACTION_DIR_RENAME_MOVE_LOCAL, []):
1870 f0, flag = args
1873 f0, flag = args
1871 if branchmerge:
1874 if branchmerge:
1872 repo.dirstate.add(f)
1875 repo.dirstate.add(f)
1873 repo.dirstate.remove(f0)
1876 repo.dirstate.remove(f0)
1874 repo.dirstate.copy(f0, f)
1877 repo.dirstate.copy(f0, f)
1875 else:
1878 else:
1876 repo.dirstate.normal(f)
1879 repo.dirstate.normal(f)
1877 repo.dirstate.drop(f0)
1880 repo.dirstate.drop(f0)
1878
1881
1879 # directory rename, get
1882 # directory rename, get
1880 for f, args, msg in actions.get(ACTION_LOCAL_DIR_RENAME_GET, []):
1883 for f, args, msg in actions.get(ACTION_LOCAL_DIR_RENAME_GET, []):
1881 f0, flag = args
1884 f0, flag = args
1882 if branchmerge:
1885 if branchmerge:
1883 repo.dirstate.add(f)
1886 repo.dirstate.add(f)
1884 repo.dirstate.copy(f0, f)
1887 repo.dirstate.copy(f0, f)
1885 else:
1888 else:
1886 repo.dirstate.normal(f)
1889 repo.dirstate.normal(f)
1887
1890
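recordupdates, like applyupdates, consumes the dictionary-of-lists shape that update() builds below: an action code mapping to a list of (filename, args, message) tuples. A small sketch of that shape with hypothetical entries, using plain strings in place of the ACTION_* constants:

actions = {
    'g': [('new.txt', ('', False), 'remote created')],
    'r': [('gone.txt', None, 'other deleted')],
    'k': [],
}

for f, args, msg in actions.get('r', []):
    print('would remove %s (%s)' % (f, msg))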
1888 def update(repo, node, branchmerge, force, ancestor=None,
1891 def update(repo, node, branchmerge, force, ancestor=None,
1889 mergeancestor=False, labels=None, matcher=None, mergeforce=False,
1892 mergeancestor=False, labels=None, matcher=None, mergeforce=False,
1890 updatecheck=None, wc=None):
1893 updatecheck=None, wc=None):
1891 """
1894 """
1892 Perform a merge between the working directory and the given node
1895 Perform a merge between the working directory and the given node
1893
1896
1894 node = the node to update to
1897 node = the node to update to
1895 branchmerge = whether to merge between branches
1898 branchmerge = whether to merge between branches
1896 force = whether to force branch merging or file overwriting
1899 force = whether to force branch merging or file overwriting
1897 matcher = a matcher to filter file lists (dirstate not updated)
1900 matcher = a matcher to filter file lists (dirstate not updated)
1898 mergeancestor = whether it is merging with an ancestor. If true,
1901 mergeancestor = whether it is merging with an ancestor. If true,
1899 we should accept the incoming changes for any prompts that occur.
1902 we should accept the incoming changes for any prompts that occur.
1900 If false, merging with an ancestor (fast-forward) is only allowed
1903 If false, merging with an ancestor (fast-forward) is only allowed
1901 between different named branches. This flag is used by the rebase extension
1904 between different named branches. This flag is used by the rebase extension
1902 as a temporary fix and should be avoided in general.
1905 as a temporary fix and should be avoided in general.
1903 labels = labels to use for base, local and other
1906 labels = labels to use for base, local and other
1904 mergeforce = whether the merge was run with 'merge --force' (deprecated): if
1907 mergeforce = whether the merge was run with 'merge --force' (deprecated): if
1905 this is True, then 'force' should be True as well.
1908 this is True, then 'force' should be True as well.
1906
1909
1907 The table below shows all the behaviors of the update command given the
1910 The table below shows all the behaviors of the update command given the
1908 -c/--check and -C/--clean or no options, whether the working directory is
1911 -c/--check and -C/--clean or no options, whether the working directory is
1909 dirty, whether a revision is specified, and the relationship of the parent
1912 dirty, whether a revision is specified, and the relationship of the parent
1910 rev to the target rev (linear or not). Match from top first. The -n
1913 rev to the target rev (linear or not). Match from top first. The -n
1911 option doesn't exist on the command line, but represents the
1914 option doesn't exist on the command line, but represents the
1912 experimental.updatecheck=noconflict option.
1915 experimental.updatecheck=noconflict option.
1913
1916
1914 This logic is tested by test-update-branches.t.
1917 This logic is tested by test-update-branches.t.
1915
1918
1916 -c -C -n -m dirty rev linear | result
1919 -c -C -n -m dirty rev linear | result
1917 y y * * * * * | (1)
1920 y y * * * * * | (1)
1918 y * y * * * * | (1)
1921 y * y * * * * | (1)
1919 y * * y * * * | (1)
1922 y * * y * * * | (1)
1920 * y y * * * * | (1)
1923 * y y * * * * | (1)
1921 * y * y * * * | (1)
1924 * y * y * * * | (1)
1922 * * y y * * * | (1)
1925 * * y y * * * | (1)
1923 * * * * * n n | x
1926 * * * * * n n | x
1924 * * * * n * * | ok
1927 * * * * n * * | ok
1925 n n n n y * y | merge
1928 n n n n y * y | merge
1926 n n n n y y n | (2)
1929 n n n n y y n | (2)
1927 n n n y y * * | merge
1930 n n n y y * * | merge
1928 n n y n y * * | merge if no conflict
1931 n n y n y * * | merge if no conflict
1929 n y n n y * * | discard
1932 n y n n y * * | discard
1930 y n n n y * * | (3)
1933 y n n n y * * | (3)
1931
1934
1932 x = can't happen
1935 x = can't happen
1933 * = don't-care
1936 * = don't-care
1934 1 = incompatible options (checked in commands.py)
1937 1 = incompatible options (checked in commands.py)
1935 2 = abort: uncommitted changes (commit or update --clean to discard changes)
1938 2 = abort: uncommitted changes (commit or update --clean to discard changes)
1936 3 = abort: uncommitted changes (checked in commands.py)
1939 3 = abort: uncommitted changes (checked in commands.py)
1937
1940
1938 The merge is performed inside ``wc``, a workingctx-like object. It defaults
1941 The merge is performed inside ``wc``, a workingctx-like object. It defaults
1939 to repo[None] if None is passed.
1942 to repo[None] if None is passed.
1940
1943
1941 Return the same tuple as applyupdates().
1944 Return the same tuple as applyupdates().
1942 """
1945 """
1943 # Avoid cycle.
1946 # Avoid cycle.
1944 from . import sparse
1947 from . import sparse
1945
1948
1946 # This function used to find the default destination if node was None, but
1949 # This function used to find the default destination if node was None, but
1947 # that's now in destutil.py.
1950 # that's now in destutil.py.
1948 assert node is not None
1951 assert node is not None
1949 if not branchmerge and not force:
1952 if not branchmerge and not force:
1950 # TODO: remove the default once all callers that pass branchmerge=False
1953 # TODO: remove the default once all callers that pass branchmerge=False
1951 # and force=False pass a value for updatecheck. We may want to allow
1954 # and force=False pass a value for updatecheck. We may want to allow
1952 # updatecheck='abort' to better support some of these callers.
1955 # updatecheck='abort' to better support some of these callers.
1953 if updatecheck is None:
1956 if updatecheck is None:
1954 updatecheck = 'linear'
1957 updatecheck = 'linear'
1955 assert updatecheck in ('none', 'linear', 'noconflict')
1958 assert updatecheck in ('none', 'linear', 'noconflict')
1956 # If we're doing a partial update, we need to skip updating
1959 # If we're doing a partial update, we need to skip updating
1957 # the dirstate, so make a note of any partial-ness to the
1960 # the dirstate, so make a note of any partial-ness to the
1958 # update here.
1961 # update here.
1959 if matcher is None or matcher.always():
1962 if matcher is None or matcher.always():
1960 partial = False
1963 partial = False
1961 else:
1964 else:
1962 partial = True
1965 partial = True
1963 with repo.wlock():
1966 with repo.wlock():
1964 if wc is None:
1967 if wc is None:
1965 wc = repo[None]
1968 wc = repo[None]
1966 pl = wc.parents()
1969 pl = wc.parents()
1967 p1 = pl[0]
1970 p1 = pl[0]
1968 pas = [None]
1971 pas = [None]
1969 if ancestor is not None:
1972 if ancestor is not None:
1970 pas = [repo[ancestor]]
1973 pas = [repo[ancestor]]
1971
1974
1972 overwrite = force and not branchmerge
1975 overwrite = force and not branchmerge
1973
1976
1974 p2 = repo[node]
1977 p2 = repo[node]
1975 if pas[0] is None:
1978 if pas[0] is None:
1976 if repo.ui.configlist('merge', 'preferancestor') == ['*']:
1979 if repo.ui.configlist('merge', 'preferancestor') == ['*']:
1977 cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node())
1980 cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node())
1978 pas = [repo[anc] for anc in (sorted(cahs) or [nullid])]
1981 pas = [repo[anc] for anc in (sorted(cahs) or [nullid])]
1979 else:
1982 else:
1980 pas = [p1.ancestor(p2, warn=branchmerge)]
1983 pas = [p1.ancestor(p2, warn=branchmerge)]
1981
1984
1982 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), bytes(p1), bytes(p2)
1985 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), bytes(p1), bytes(p2)
1983
1986
1984 ### check phase
1987 ### check phase
1985 if not overwrite:
1988 if not overwrite:
1986 if len(pl) > 1:
1989 if len(pl) > 1:
1987 raise error.Abort(_("outstanding uncommitted merge"))
1990 raise error.Abort(_("outstanding uncommitted merge"))
1988 ms = mergestate.read(repo)
1991 ms = mergestate.read(repo)
1989 if list(ms.unresolved()):
1992 if list(ms.unresolved()):
1990 raise error.Abort(_("outstanding merge conflicts"))
1993 raise error.Abort(_("outstanding merge conflicts"))
1991 if branchmerge:
1994 if branchmerge:
1992 if pas == [p2]:
1995 if pas == [p2]:
1993 raise error.Abort(_("merging with a working directory ancestor"
1996 raise error.Abort(_("merging with a working directory ancestor"
1994 " has no effect"))
1997 " has no effect"))
1995 elif pas == [p1]:
1998 elif pas == [p1]:
1996 if not mergeancestor and wc.branch() == p2.branch():
1999 if not mergeancestor and wc.branch() == p2.branch():
1997 raise error.Abort(_("nothing to merge"),
2000 raise error.Abort(_("nothing to merge"),
1998 hint=_("use 'hg update' "
2001 hint=_("use 'hg update' "
1999 "or check 'hg heads'"))
2002 "or check 'hg heads'"))
2000 if not force and (wc.files() or wc.deleted()):
2003 if not force and (wc.files() or wc.deleted()):
2001 raise error.Abort(_("uncommitted changes"),
2004 raise error.Abort(_("uncommitted changes"),
2002 hint=_("use 'hg status' to list changes"))
2005 hint=_("use 'hg status' to list changes"))
2003 if not wc.isinmemory():
2006 if not wc.isinmemory():
2004 for s in sorted(wc.substate):
2007 for s in sorted(wc.substate):
2005 wc.sub(s).bailifchanged()
2008 wc.sub(s).bailifchanged()
2006
2009
2007 elif not overwrite:
2010 elif not overwrite:
2008 if p1 == p2: # no-op update
2011 if p1 == p2: # no-op update
2009 # call the hooks and exit early
2012 # call the hooks and exit early
2010 repo.hook('preupdate', throw=True, parent1=xp2, parent2='')
2013 repo.hook('preupdate', throw=True, parent1=xp2, parent2='')
2011 repo.hook('update', parent1=xp2, parent2='', error=0)
2014 repo.hook('update', parent1=xp2, parent2='', error=0)
2012 return updateresult(0, 0, 0, 0)
2015 return updateresult(0, 0, 0, 0)
2013
2016
2014 if (updatecheck == 'linear' and
2017 if (updatecheck == 'linear' and
2015 pas not in ([p1], [p2])): # nonlinear
2018 pas not in ([p1], [p2])): # nonlinear
2016 dirty = wc.dirty(missing=True)
2019 dirty = wc.dirty(missing=True)
2017 if dirty:
2020 if dirty:
2018 # Branching is a bit strange to ensure we do the minimal
2021 # Branching is a bit strange to ensure we do the minimal
2019 # number of calls to obsutil.foreground.
2022 # number of calls to obsutil.foreground.
2020 foreground = obsutil.foreground(repo, [p1.node()])
2023 foreground = obsutil.foreground(repo, [p1.node()])
2021 # note: the <node> variable contains a random identifier
2024 # note: the <node> variable contains a random identifier
2022 if repo[node].node() in foreground:
2025 if repo[node].node() in foreground:
2023 pass # allow updating to successors
2026 pass # allow updating to successors
2024 else:
2027 else:
2025 msg = _("uncommitted changes")
2028 msg = _("uncommitted changes")
2026 hint = _("commit or update --clean to discard changes")
2029 hint = _("commit or update --clean to discard changes")
2027 raise error.UpdateAbort(msg, hint=hint)
2030 raise error.UpdateAbort(msg, hint=hint)
2028 else:
2031 else:
2029 # Allow jumping branches if clean and specific rev given
2032 # Allow jumping branches if clean and specific rev given
2030 pass
2033 pass
2031
2034
2032 if overwrite:
2035 if overwrite:
2033 pas = [wc]
2036 pas = [wc]
2034 elif not branchmerge:
2037 elif not branchmerge:
2035 pas = [p1]
2038 pas = [p1]
2036
2039
2037 # deprecated config: merge.followcopies
2040 # deprecated config: merge.followcopies
2038 followcopies = repo.ui.configbool('merge', 'followcopies')
2041 followcopies = repo.ui.configbool('merge', 'followcopies')
2039 if overwrite:
2042 if overwrite:
2040 followcopies = False
2043 followcopies = False
2041 elif not pas[0]:
2044 elif not pas[0]:
2042 followcopies = False
2045 followcopies = False
2043 if not branchmerge and not wc.dirty(missing=True):
2046 if not branchmerge and not wc.dirty(missing=True):
2044 followcopies = False
2047 followcopies = False
2045
2048
2046 ### calculate phase
2049 ### calculate phase
2047 actionbyfile, diverge, renamedelete = calculateupdates(
2050 actionbyfile, diverge, renamedelete = calculateupdates(
2048 repo, wc, p2, pas, branchmerge, force, mergeancestor,
2051 repo, wc, p2, pas, branchmerge, force, mergeancestor,
2049 followcopies, matcher=matcher, mergeforce=mergeforce)
2052 followcopies, matcher=matcher, mergeforce=mergeforce)
2050
2053
2051 if updatecheck == 'noconflict':
2054 if updatecheck == 'noconflict':
2052 for f, (m, args, msg) in actionbyfile.iteritems():
2055 for f, (m, args, msg) in actionbyfile.iteritems():
2053 if m not in (ACTION_GET, ACTION_KEEP, ACTION_EXEC,
2056 if m not in (ACTION_GET, ACTION_KEEP, ACTION_EXEC,
2054 ACTION_REMOVE, ACTION_PATH_CONFLICT_RESOLVE):
2057 ACTION_REMOVE, ACTION_PATH_CONFLICT_RESOLVE):
2055 msg = _("conflicting changes")
2058 msg = _("conflicting changes")
2056 hint = _("commit or update --clean to discard changes")
2059 hint = _("commit or update --clean to discard changes")
2057 raise error.Abort(msg, hint=hint)
2060 raise error.Abort(msg, hint=hint)
2058
2061
2059 # Prompt and create actions. Most of this is in the resolve phase
2062 # Prompt and create actions. Most of this is in the resolve phase
2060 # already, but we can't handle .hgsubstate in filemerge or
2063 # already, but we can't handle .hgsubstate in filemerge or
2061 # subrepoutil.submerge yet so we have to keep prompting for it.
2064 # subrepoutil.submerge yet so we have to keep prompting for it.
2062 if '.hgsubstate' in actionbyfile:
2065 if '.hgsubstate' in actionbyfile:
2063 f = '.hgsubstate'
2066 f = '.hgsubstate'
2064 m, args, msg = actionbyfile[f]
2067 m, args, msg = actionbyfile[f]
2065 prompts = filemerge.partextras(labels)
2068 prompts = filemerge.partextras(labels)
2066 prompts['f'] = f
2069 prompts['f'] = f
2067 if m == ACTION_CHANGED_DELETED:
2070 if m == ACTION_CHANGED_DELETED:
2068 if repo.ui.promptchoice(
2071 if repo.ui.promptchoice(
2069 _("local%(l)s changed %(f)s which other%(o)s deleted\n"
2072 _("local%(l)s changed %(f)s which other%(o)s deleted\n"
2070 "use (c)hanged version or (d)elete?"
2073 "use (c)hanged version or (d)elete?"
2071 "$$ &Changed $$ &Delete") % prompts, 0):
2074 "$$ &Changed $$ &Delete") % prompts, 0):
2072 actionbyfile[f] = (ACTION_REMOVE, None, 'prompt delete')
2075 actionbyfile[f] = (ACTION_REMOVE, None, 'prompt delete')
2073 elif f in p1:
2076 elif f in p1:
2074 actionbyfile[f] = (ACTION_ADD_MODIFIED, None, 'prompt keep')
2077 actionbyfile[f] = (ACTION_ADD_MODIFIED, None, 'prompt keep')
2075 else:
2078 else:
2076 actionbyfile[f] = (ACTION_ADD, None, 'prompt keep')
2079 actionbyfile[f] = (ACTION_ADD, None, 'prompt keep')
2077 elif m == ACTION_DELETED_CHANGED:
2080 elif m == ACTION_DELETED_CHANGED:
2078 f1, f2, fa, move, anc = args
2081 f1, f2, fa, move, anc = args
2079 flags = p2[f2].flags()
2082 flags = p2[f2].flags()
2080 if repo.ui.promptchoice(
2083 if repo.ui.promptchoice(
2081 _("other%(o)s changed %(f)s which local%(l)s deleted\n"
2084 _("other%(o)s changed %(f)s which local%(l)s deleted\n"
2082 "use (c)hanged version or leave (d)eleted?"
2085 "use (c)hanged version or leave (d)eleted?"
2083 "$$ &Changed $$ &Deleted") % prompts, 0) == 0:
2086 "$$ &Changed $$ &Deleted") % prompts, 0) == 0:
2084 actionbyfile[f] = (ACTION_GET, (flags, False),
2087 actionbyfile[f] = (ACTION_GET, (flags, False),
2085 'prompt recreating')
2088 'prompt recreating')
2086 else:
2089 else:
2087 del actionbyfile[f]
2090 del actionbyfile[f]
2088
2091
2089 # Convert to dictionary-of-lists format
2092 # Convert to dictionary-of-lists format
2090 actions = dict((m, [])
2093 actions = dict((m, [])
2091 for m in (
2094 for m in (
2092 ACTION_ADD,
2095 ACTION_ADD,
2093 ACTION_ADD_MODIFIED,
2096 ACTION_ADD_MODIFIED,
2094 ACTION_FORGET,
2097 ACTION_FORGET,
2095 ACTION_GET,
2098 ACTION_GET,
2096 ACTION_CHANGED_DELETED,
2099 ACTION_CHANGED_DELETED,
2097 ACTION_DELETED_CHANGED,
2100 ACTION_DELETED_CHANGED,
2098 ACTION_REMOVE,
2101 ACTION_REMOVE,
2099 ACTION_DIR_RENAME_MOVE_LOCAL,
2102 ACTION_DIR_RENAME_MOVE_LOCAL,
2100 ACTION_LOCAL_DIR_RENAME_GET,
2103 ACTION_LOCAL_DIR_RENAME_GET,
2101 ACTION_MERGE,
2104 ACTION_MERGE,
2102 ACTION_EXEC,
2105 ACTION_EXEC,
2103 ACTION_KEEP,
2106 ACTION_KEEP,
2104 ACTION_PATH_CONFLICT,
2107 ACTION_PATH_CONFLICT,
2105 ACTION_PATH_CONFLICT_RESOLVE))
2108 ACTION_PATH_CONFLICT_RESOLVE))
2106 for f, (m, args, msg) in actionbyfile.iteritems():
2109 for f, (m, args, msg) in actionbyfile.iteritems():
2107 if m not in actions:
2110 if m not in actions:
2108 actions[m] = []
2111 actions[m] = []
2109 actions[m].append((f, args, msg))
2112 actions[m].append((f, args, msg))
2110
2113
2111 if not util.fscasesensitive(repo.path):
2114 if not util.fscasesensitive(repo.path):
2112 # check collision between files only in p2 for clean update
2115 # check collision between files only in p2 for clean update
2113 if (not branchmerge and
2116 if (not branchmerge and
2114 (force or not wc.dirty(missing=True, branch=False))):
2117 (force or not wc.dirty(missing=True, branch=False))):
2115 _checkcollision(repo, p2.manifest(), None)
2118 _checkcollision(repo, p2.manifest(), None)
2116 else:
2119 else:
2117 _checkcollision(repo, wc.manifest(), actions)
2120 _checkcollision(repo, wc.manifest(), actions)
2118
2121
2119 # divergent renames
2122 # divergent renames
2120 for f, fl in sorted(diverge.iteritems()):
2123 for f, fl in sorted(diverge.iteritems()):
2121 repo.ui.warn(_("note: possible conflict - %s was renamed "
2124 repo.ui.warn(_("note: possible conflict - %s was renamed "
2122 "multiple times to:\n") % f)
2125 "multiple times to:\n") % f)
2123 for nf in fl:
2126 for nf in fl:
2124 repo.ui.warn(" %s\n" % nf)
2127 repo.ui.warn(" %s\n" % nf)
2125
2128
2126 # rename and delete
2129 # rename and delete
2127 for f, fl in sorted(renamedelete.iteritems()):
2130 for f, fl in sorted(renamedelete.iteritems()):
2128 repo.ui.warn(_("note: possible conflict - %s was deleted "
2131 repo.ui.warn(_("note: possible conflict - %s was deleted "
2129 "and renamed to:\n") % f)
2132 "and renamed to:\n") % f)
2130 for nf in fl:
2133 for nf in fl:
2131 repo.ui.warn(" %s\n" % nf)
2134 repo.ui.warn(" %s\n" % nf)
2132
2135
2133 ### apply phase
2136 ### apply phase
2134 if not branchmerge: # just jump to the new rev
2137 if not branchmerge: # just jump to the new rev
2135 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
2138 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
2136 if not partial and not wc.isinmemory():
2139 if not partial and not wc.isinmemory():
2137 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
2140 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
2138 # note that we're in the middle of an update
2141 # note that we're in the middle of an update
2139 repo.vfs.write('updatestate', p2.hex())
2142 repo.vfs.write('updatestate', p2.hex())
2140
2143
2141 # Advertise fsmonitor when its presence could be useful.
2144 # Advertise fsmonitor when its presence could be useful.
2142 #
2145 #
2143 # We only advertise when performing an update from an empty working
2146 # We only advertise when performing an update from an empty working
2144 # directory. This typically only occurs during initial clone.
2147 # directory. This typically only occurs during initial clone.
2145 #
2148 #
2146 # We give users a mechanism to disable the warning in case it is
2149 # We give users a mechanism to disable the warning in case it is
2147 # annoying.
2150 # annoying.
2148 #
2151 #
2149 # We only allow on Linux and MacOS because that's where fsmonitor is
2152 # We only allow on Linux and MacOS because that's where fsmonitor is
2150 # considered stable.
2153 # considered stable.
2151 fsmonitorwarning = repo.ui.configbool('fsmonitor', 'warn_when_unused')
2154 fsmonitorwarning = repo.ui.configbool('fsmonitor', 'warn_when_unused')
2152 fsmonitorthreshold = repo.ui.configint('fsmonitor',
2155 fsmonitorthreshold = repo.ui.configint('fsmonitor',
2153 'warn_update_file_count')
2156 'warn_update_file_count')
2154 try:
2157 try:
2155 # avoid cycle: extensions -> cmdutil -> merge
2158 # avoid cycle: extensions -> cmdutil -> merge
2156 from . import extensions
2159 from . import extensions
2157 extensions.find('fsmonitor')
2160 extensions.find('fsmonitor')
2158 fsmonitorenabled = repo.ui.config('fsmonitor', 'mode') != 'off'
2161 fsmonitorenabled = repo.ui.config('fsmonitor', 'mode') != 'off'
2159 # We intentionally don't look at whether fsmonitor has disabled
2162 # We intentionally don't look at whether fsmonitor has disabled
2160 # itself because a) fsmonitor may have already printed a warning
2163 # itself because a) fsmonitor may have already printed a warning
2161 # b) we only care about the config state here.
2164 # b) we only care about the config state here.
2162 except KeyError:
2165 except KeyError:
2163 fsmonitorenabled = False
2166 fsmonitorenabled = False
2164
2167
2165 if (fsmonitorwarning
2168 if (fsmonitorwarning
2166 and not fsmonitorenabled
2169 and not fsmonitorenabled
2167 and p1.node() == nullid
2170 and p1.node() == nullid
2168 and len(actions[ACTION_GET]) >= fsmonitorthreshold
2171 and len(actions[ACTION_GET]) >= fsmonitorthreshold
2169 and pycompat.sysplatform.startswith(('linux', 'darwin'))):
2172 and pycompat.sysplatform.startswith(('linux', 'darwin'))):
2170 repo.ui.warn(
2173 repo.ui.warn(
2171 _('(warning: large working directory being used without '
2174 _('(warning: large working directory being used without '
2172 'fsmonitor enabled; enable fsmonitor to improve performance; '
2175 'fsmonitor enabled; enable fsmonitor to improve performance; '
2173 'see "hg help -e fsmonitor")\n'))
2176 'see "hg help -e fsmonitor")\n'))
2174
2177
2175 stats = applyupdates(repo, actions, wc, p2, overwrite, labels=labels)
2178 stats = applyupdates(repo, actions, wc, p2, overwrite, labels=labels)
2176
2179
2177 if not partial and not wc.isinmemory():
2180 if not partial and not wc.isinmemory():
2178 with repo.dirstate.parentchange():
2181 with repo.dirstate.parentchange():
2179 repo.setparents(fp1, fp2)
2182 repo.setparents(fp1, fp2)
2180 recordupdates(repo, actions, branchmerge)
2183 recordupdates(repo, actions, branchmerge)
2181 # update completed, clear state
2184 # update completed, clear state
2182 util.unlink(repo.vfs.join('updatestate'))
2185 util.unlink(repo.vfs.join('updatestate'))
2183
2186
2184 if not branchmerge:
2187 if not branchmerge:
2185 repo.dirstate.setbranch(p2.branch())
2188 repo.dirstate.setbranch(p2.branch())
2186
2189
2187 # If we're updating to a location, clean up any stale temporary includes
2190 # If we're updating to a location, clean up any stale temporary includes
2188 # (ex: this happens during hg rebase --abort).
2191 # (ex: this happens during hg rebase --abort).
2189 if not branchmerge:
2192 if not branchmerge:
2190 sparse.prunetemporaryincludes(repo)
2193 sparse.prunetemporaryincludes(repo)
2191
2194
2192 if not partial:
2195 if not partial:
2193 repo.hook('update', parent1=xp1, parent2=xp2,
2196 repo.hook('update', parent1=xp1, parent2=xp2,
2194 error=stats.unresolvedcount)
2197 error=stats.unresolvedcount)
2195 return stats
2198 return stats
2196
2199
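update() is normally reached through hg.update()/hg.merge() and commands.py rather than called directly; still, as a hedged sketch of a direct call for a plain working-directory update, assuming an existing repository in the current directory:

from mercurial import hg, ui as uimod
from mercurial import merge as mergemod

ui = uimod.ui.load()
repo = hg.repository(ui, '.')

# non-merging update of the working directory to 'tip'
stats = mergemod.update(repo, repo['tip'].node(), branchmerge=False, force=False)
if stats.unresolvedcount:
    ui.warn(b'unresolved conflicts remain\n')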
2197 def graft(repo, ctx, pctx, labels, keepparent=False,
2200 def graft(repo, ctx, pctx, labels, keepparent=False,
2198 keepconflictparent=False):
2201 keepconflictparent=False):
2199 """Do a graft-like merge.
2202 """Do a graft-like merge.
2200
2203
2201 This is a merge where the merge ancestor is chosen such that one
2204 This is a merge where the merge ancestor is chosen such that one
2202 or more changesets are grafted onto the current changeset. In
2205 or more changesets are grafted onto the current changeset. In
2203 addition to the merge, this fixes up the dirstate to include only
2206 addition to the merge, this fixes up the dirstate to include only
2204 a single parent (if keepparent is False) and tries to duplicate any
2207 a single parent (if keepparent is False) and tries to duplicate any
2205 renames/copies appropriately.
2208 renames/copies appropriately.
2206
2209
2207 ctx - changeset to rebase
2210 ctx - changeset to rebase
2208 pctx - merge base, usually ctx.p1()
2211 pctx - merge base, usually ctx.p1()
2209 labels - merge labels eg ['local', 'graft']
2212 labels - merge labels eg ['local', 'graft']
2210 keepparent - keep second parent if any
2213 keepparent - keep second parent if any
2211 keepconflictparent - if unresolved, keep parent used for the merge
2214 keepconflictparent - if unresolved, keep parent used for the merge
2212
2215
2213 """
2216 """
2214 # If we're grafting a descendant onto an ancestor, be sure to pass
2217 # If we're grafting a descendant onto an ancestor, be sure to pass
2215 # mergeancestor=True to update. This does two things: 1) allows the merge if
2218 # mergeancestor=True to update. This does two things: 1) allows the merge if
2216 # the destination is the same as the parent of the ctx (so we can use graft
2219 # the destination is the same as the parent of the ctx (so we can use graft
2217 # to copy commits), and 2) informs update that the incoming changes are
2220 # to copy commits), and 2) informs update that the incoming changes are
2218 # newer than the destination so it doesn't prompt about "remote changed foo
2221 # newer than the destination so it doesn't prompt about "remote changed foo
2219 # which local deleted".
2222 # which local deleted".
2220 mergeancestor = repo.changelog.isancestor(repo['.'].node(), ctx.node())
2223 mergeancestor = repo.changelog.isancestor(repo['.'].node(), ctx.node())
2221
2224
2222 stats = update(repo, ctx.node(), True, True, pctx.node(),
2225 stats = update(repo, ctx.node(), True, True, pctx.node(),
2223 mergeancestor=mergeancestor, labels=labels)
2226 mergeancestor=mergeancestor, labels=labels)
2224
2227
2225
2228
2226 if keepconflictparent and stats.unresolvedcount:
2229 if keepconflictparent and stats.unresolvedcount:
2227 pother = ctx.node()
2230 pother = ctx.node()
2228 else:
2231 else:
2229 pother = nullid
2232 pother = nullid
2230 parents = ctx.parents()
2233 parents = ctx.parents()
2231 if keepparent and len(parents) == 2 and pctx in parents:
2234 if keepparent and len(parents) == 2 and pctx in parents:
2232 parents.remove(pctx)
2235 parents.remove(pctx)
2233 pother = parents[0].node()
2236 pother = parents[0].node()
2234
2237
2235 with repo.dirstate.parentchange():
2238 with repo.dirstate.parentchange():
2236 repo.setparents(repo['.'].node(), pother)
2239 repo.setparents(repo['.'].node(), pother)
2237 repo.dirstate.write(repo.currenttransaction())
2240 repo.dirstate.write(repo.currenttransaction())
2238 # fix up dirstate for copies and renames
2241 # fix up dirstate for copies and renames
2239 copies.duplicatecopies(repo, repo[None], ctx.rev(), pctx.rev())
2242 copies.duplicatecopies(repo, repo[None], ctx.rev(), pctx.rev())
2240 return stats
2243 return stats
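graft() is driven by the graft and rebase machinery; as a brief hedged sketch of a direct call, reusing the repo, ui, and mergemod objects from the update() sketch above and an invented revision identifier:

ctx = repo['some-revision']   # hypothetical changeset to copy
pctx = ctx.p1()               # merge base, usually ctx.p1() per the docstring
stats = mergemod.graft(repo, ctx, pctx, labels=['local', 'graft'])
if stats.unresolvedcount:
    ui.warn(b'graft left unresolved files; run hg resolve\n')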